Compare commits


No commits in common. "main" and "7.0.0b1" have entirely different histories.

118 changed files with 607 additions and 975 deletions

View File

@ -11,26 +11,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: '3.x'
- run: pip install -U ruff==0.1.8
- run: pip install -U ruff==0.0.284
- name: Run ruff
run: ruff docker tests
build:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.x'
- run: pip3 install build && python -m build .
- uses: actions/upload-artifact@v4
with:
name: dist
path: dist
unit-tests:
runs-on: ubuntu-latest
strategy:
@ -40,14 +27,14 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
- name: Install dependencies
run: |
python3 -m pip install --upgrade pip
pip3 install '.[ssh,dev]'
pip3 install -r test-requirements.txt -r requirements.txt
- name: Run unit tests
run: |
docker logout
@ -62,9 +49,6 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
fetch-tags: true
- name: make ${{ matrix.variant }}
run: |
docker logout

View File

@ -22,18 +22,16 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Generate Package
- name: Generate Pacakge
run: |
pip3 install build
python -m build .
pip3 install wheel
python setup.py sdist bdist_wheel
env:
# This is also supported by Hatch; see
# https://github.com/ofek/hatch-vcs#version-source-environment-variables
SETUPTOOLS_SCM_PRETEND_VERSION: ${{ inputs.tag }}
SETUPTOOLS_SCM_PRETEND_VERSION_FOR_DOCKER: ${{ inputs.tag }}
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1

View File

@ -4,14 +4,14 @@ sphinx:
configuration: docs/conf.py
build:
os: ubuntu-22.04
os: ubuntu-20.04
tools:
python: '3.12'
python: '3.10'
python:
install:
- requirements: docs-requirements.txt
- method: pip
path: .
extra_requirements:
- ssh
- docs

View File

@ -1,13 +1,17 @@
# syntax=docker/dockerfile:1
ARG PYTHON_VERSION=3.12
FROM python:${PYTHON_VERSION}
WORKDIR /src
COPY . .
ARG VERSION=0.0.0.dev0
RUN --mount=type=cache,target=/cache/pip \
PIP_CACHE_DIR=/cache/pip \
SETUPTOOLS_SCM_PRETEND_VERSION=${VERSION} \
pip install .[ssh]
COPY requirements.txt /src/requirements.txt
RUN pip install --no-cache-dir -r requirements.txt
COPY test-requirements.txt /src/test-requirements.txt
RUN pip install --no-cache-dir -r test-requirements.txt
COPY . .
ARG SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER
RUN pip install --no-cache-dir .

View File

@ -11,12 +11,7 @@ RUN addgroup --gid $gid sphinx \
&& useradd --uid $uid --gid $gid -M sphinx
WORKDIR /src
COPY . .
ARG VERSION=0.0.0.dev0
RUN --mount=type=cache,target=/cache/pip \
PIP_CACHE_DIR=/cache/pip \
SETUPTOOLS_SCM_PRETEND_VERSION=${VERSION} \
pip install .[ssh,docs]
COPY requirements.txt docs-requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt -r docs-requirements.txt
USER sphinx

MANIFEST.in (new file, 9 lines)
View File

@ -0,0 +1,9 @@
include test-requirements.txt
include requirements.txt
include README.md
include README.rst
include LICENSE
recursive-include tests *.py
recursive-include tests/unit/testdata *
recursive-include tests/integration/testdata *
recursive-include tests/gpg-keys *

View File

@ -1,5 +1,5 @@
TEST_API_VERSION ?= 1.45
TEST_ENGINE_VERSION ?= 26.1
TEST_API_VERSION ?= 1.41
TEST_ENGINE_VERSION ?= 20.10
ifeq ($(OS),Windows_NT)
PLATFORM := Windows
@ -11,17 +11,12 @@ ifeq ($(PLATFORM),Linux)
uid_args := "--build-arg uid=$(shell id -u) --build-arg gid=$(shell id -g)"
endif
SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER ?= $(shell git describe --match '[0-9]*' --dirty='.m' --always --tags 2>/dev/null | sed -r 's/-([0-9]+)/.dev\1/' | sed 's/-/+/')
ifeq ($(SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER),)
SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER = "0.0.0.dev0"
endif
.PHONY: all
all: test
.PHONY: clean
clean:
-docker rm -f dpy-dind dpy-dind-certs dpy-dind-ssl
-docker rm -f dpy-dind-py3 dpy-dind-certs dpy-dind-ssl
find -name "__pycache__" | xargs rm -rf
.PHONY: build-dind-ssh
@ -30,46 +25,35 @@ build-dind-ssh:
--pull \
-t docker-dind-ssh \
-f tests/Dockerfile-ssh-dind \
--build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
--build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} \
--build-arg API_VERSION=${TEST_API_VERSION} \
.
--build-arg APT_MIRROR .
.PHONY: build
build:
.PHONY: build-py3
build-py3:
docker build \
--pull \
-t docker-sdk-python3 \
-f tests/Dockerfile \
--build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
.
--build-arg APT_MIRROR .
.PHONY: build-docs
build-docs:
docker build \
-t docker-sdk-python-docs \
-f Dockerfile-docs \
--build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
$(uid_args) \
.
docker build -t docker-sdk-python-docs -f Dockerfile-docs $(uid_args) .
.PHONY: build-dind-certs
build-dind-certs:
docker build \
-t dpy-dind-certs \
-f tests/Dockerfile-dind-certs \
--build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
.
docker build -t dpy-dind-certs -f tests/Dockerfile-dind-certs .
.PHONY: test
test: ruff unit-test integration-dind integration-dind-ssl
test: ruff unit-test-py3 integration-dind integration-dind-ssl
.PHONY: unit-test
unit-test: build
.PHONY: unit-test-py3
unit-test-py3: build-py3
docker run -t --rm docker-sdk-python3 py.test tests/unit
.PHONY: integration-test
integration-test: build
.PHONY: integration-test-py3
integration-test-py3: build-py3
docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file}
.PHONY: setup-network
@ -77,12 +61,15 @@ setup-network:
docker network inspect dpy-tests || docker network create dpy-tests
.PHONY: integration-dind
integration-dind: build setup-network
docker rm -vf dpy-dind || :
integration-dind: integration-dind-py3
.PHONY: integration-dind-py3
integration-dind-py3: build-py3 setup-network
docker rm -vf dpy-dind-py3 || :
docker run \
--detach \
--name dpy-dind \
--name dpy-dind-py3 \
--network dpy-tests \
--pull=always \
--privileged \
@ -95,10 +82,10 @@ integration-dind: build setup-network
--rm \
--tty \
busybox \
sh -c 'while ! nc -z dpy-dind 2375; do sleep 1; done'
sh -c 'while ! nc -z dpy-dind-py3 2375; do sleep 1; done'
docker run \
--env="DOCKER_HOST=tcp://dpy-dind:2375" \
--env="DOCKER_HOST=tcp://dpy-dind-py3:2375" \
--env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \
--network dpy-tests \
--rm \
@ -106,11 +93,11 @@ integration-dind: build setup-network
docker-sdk-python3 \
py.test tests/integration/${file}
docker rm -vf dpy-dind
docker rm -vf dpy-dind-py3
.PHONY: integration-dind-ssh
integration-dind-ssh: build-dind-ssh build setup-network
integration-dind-ssh: build-dind-ssh build-py3 setup-network
docker rm -vf dpy-dind-ssh || :
docker run -d --network dpy-tests --name dpy-dind-ssh --privileged \
docker-dind-ssh dockerd --experimental
@ -129,7 +116,7 @@ integration-dind-ssh: build-dind-ssh build setup-network
.PHONY: integration-dind-ssl
integration-dind-ssl: build-dind-certs build setup-network
integration-dind-ssl: build-dind-certs build-py3 setup-network
docker rm -vf dpy-dind-certs dpy-dind-ssl || :
docker run -d --name dpy-dind-certs dpy-dind-certs
@ -177,7 +164,7 @@ integration-dind-ssl: build-dind-certs build setup-network
docker rm -vf dpy-dind-ssl dpy-dind-certs
.PHONY: ruff
ruff: build
ruff: build-py3
docker run -t --rm docker-sdk-python3 ruff docker tests
.PHONY: docs
@ -185,5 +172,5 @@ docs: build-docs
docker run --rm -t -v `pwd`:/src docker-sdk-python-docs sphinx-build docs docs/_build
.PHONY: shell
shell: build
shell: build-py3
docker run -it -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 python

View File

@ -6,7 +6,7 @@ A Python library for the Docker Engine API. It lets you do anything the `docker`
## Installation
The latest stable version [is available on PyPI](https://pypi.python.org/pypi/docker/). Install with pip:
The latest stable version [is available on PyPI](https://pypi.python.org/pypi/docker/). Either add `docker` to your `requirements.txt` file or install with pip:
pip install docker
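
Once installed, a minimal usage sketch of the SDK (the image name and command are illustrative):

```python
import docker

# Connect using environment variables such as DOCKER_HOST; this is the
# standard high-level entry point.
client = docker.from_env()

# Run a throwaway container and capture its stdout as bytes.
output = client.containers.run("alpine", ["echo", "hello world"], remove=True)
print(output.decode())
```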

View File

@ -1,6 +1,7 @@
from .api import APIClient
from .client import DockerClient, from_env
from .context import Context, ContextAPI
from .context import Context
from .context import ContextAPI
from .tls import TLSConfig
from .version import __version__

View File

@ -3,7 +3,11 @@ import logging
import os
import random
from .. import auth, constants, errors, utils
from .. import auth
from .. import constants
from .. import errors
from .. import utils
log = logging.getLogger(__name__)
@ -125,16 +129,13 @@ class BuildApiMixin:
raise errors.DockerException(
'Can not use custom encoding if gzip is enabled'
)
if tag is not None:
if not utils.match_tag(tag):
raise errors.DockerException(
f"invalid tag '{tag}': invalid reference format"
)
for key in container_limits.keys():
if key not in constants.CONTAINER_LIMITS_KEYS:
raise errors.DockerException(
f"invalid tag '{tag}': invalid reference format"
f'Invalid container_limits key {key}'
)
if custom_context:
if not fileobj:
raise TypeError("You must specify fileobj with custom_context")
@ -275,24 +276,10 @@ class BuildApiMixin:
return self._stream_helper(response, decode=decode)
@utils.minimum_version('1.31')
def prune_builds(self, filters=None, keep_storage=None, all=None):
def prune_builds(self):
"""
Delete the builder cache
Args:
filters (dict): Filters to process on the prune list.
Needs Docker API v1.39+
Available filters:
- dangling (bool): When set to true (or 1), prune only
unused and untagged images.
- until (str): Can be Unix timestamps, date formatted
timestamps, or Go duration strings (e.g. 10m, 1h30m) computed
relative to the daemon's local time.
keep_storage (int): Amount of disk space in bytes to keep for cache.
Needs Docker API v1.39+
all (bool): Remove all types of build cache.
Needs Docker API v1.39+
Returns:
(dict): A dictionary containing information about the operation's
result. The ``SpaceReclaimed`` key indicates the amount of
@ -303,20 +290,7 @@ class BuildApiMixin:
If the server returns an error.
"""
url = self._url("/build/prune")
if (filters, keep_storage, all) != (None, None, None) \
and utils.version_lt(self._version, '1.39'):
raise errors.InvalidVersion(
'`filters`, `keep_storage`, and `all` args are only available '
'for API version > 1.38'
)
params = {}
if filters is not None:
params['filters'] = utils.convert_filters(filters)
if keep_storage is not None:
params['keep-storage'] = keep_storage
if all is not None:
params['all'] = all
return self._result(self._post(url, params=params), True)
return self._result(self._post(url), True)
def _set_auth_headers(self, headers):
log.debug('Looking for auth config')
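
As a usage sketch of the expanded `prune_builds()` signature kept on main (parameter names follow the docstring above; the filter values are illustrative and need Docker API v1.39+):

```python
import docker

# Low-level API client; the socket path shown is the common default.
client = docker.APIClient(base_url="unix:///var/run/docker.sock")

# Prune build cache entries older than 24 hours, keep roughly 1 GB of cache,
# and include all cache types.
result = client.prune_builds(
    filters={"until": "24h"},
    keep_storage=1 * 1024 ** 3,
    all=True,
)
print(result.get("SpaceReclaimed"))
```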

View File

@ -8,22 +8,12 @@ import requests.adapters
import requests.exceptions
from .. import auth
from ..constants import (
DEFAULT_MAX_POOL_SIZE,
DEFAULT_NUM_POOLS,
DEFAULT_NUM_POOLS_SSH,
DEFAULT_TIMEOUT_SECONDS,
DEFAULT_USER_AGENT,
IS_WINDOWS_PLATFORM,
MINIMUM_DOCKER_API_VERSION,
STREAM_HEADER_SIZE_BYTES,
)
from ..errors import (
DockerException,
InvalidVersion,
TLSParameterError,
create_api_error_from_http_exception,
)
from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH,
DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS,
DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM,
MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES)
from ..errors import (DockerException, InvalidVersion, TLSParameterError,
create_api_error_from_http_exception)
from ..tls import TLSConfig
from ..transport import UnixHTTPAdapter
from ..utils import check_resource, config, update_headers, utils

View File

@ -1,14 +1,13 @@
from datetime import datetime
from .. import errors, utils
from .. import errors
from .. import utils
from ..constants import DEFAULT_DATA_CHUNK_SIZE
from ..types import (
CancellableStream,
ContainerConfig,
EndpointConfig,
HostConfig,
NetworkingConfig,
)
from ..types import CancellableStream
from ..types import ContainerConfig
from ..types import EndpointConfig
from ..types import HostConfig
from ..types import NetworkingConfig
class ContainerApiMixin:
@ -844,7 +843,7 @@ class ContainerApiMixin:
float (in fractional seconds)
Returns:
(generator of bytes or bytes)
(generator or str)
Raises:
:py:class:`docker.errors.APIError`

View File

@ -1,4 +1,5 @@
from .. import errors, utils
from .. import errors
from .. import utils
from ..types import CancellableStream

View File

@ -47,7 +47,7 @@ class ImageApiMixin:
image (str): The image to show history for
Returns:
(list): The history of the image
(str): The history of the image
Raises:
:py:class:`docker.errors.APIError`

View File

@ -1,6 +1,7 @@
from .. import utils
from ..errors import InvalidVersion
from ..utils import check_resource, minimum_version, version_lt
from ..utils import check_resource, minimum_version
from ..utils import version_lt
from .. import utils
class NetworkApiMixin:

View File

@ -1,6 +1,7 @@
import base64
from .. import errors, utils
from .. import errors
from .. import utils
class SecretApiMixin:

View File

@ -1,8 +1,9 @@
import http.client as http_client
import logging
from .. import errors, types, utils
import http.client as http_client
from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE
from .. import errors
from .. import types
from .. import utils
log = logging.getLogger(__name__)

View File

@ -1,4 +1,5 @@
from .. import errors, utils
from .. import errors
from .. import utils
class VolumeApiMixin:

View File

@ -2,7 +2,8 @@ import base64
import json
import logging
from . import credentials, errors
from . import credentials
from . import errors
from .utils import config
INDEX_NAME = 'docker.io'

View File

@ -1,5 +1,5 @@
from .api.client import APIClient
from .constants import DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS
from .constants import (DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE)
from .models.configs import ConfigCollection
from .models.containers import ContainerCollection
from .models.images import ImageCollection

View File

@ -1,9 +1,8 @@
import sys
from .version import __version__
DEFAULT_DOCKER_API_VERSION = '1.45'
MINIMUM_DOCKER_API_VERSION = '1.24'
DEFAULT_DOCKER_API_VERSION = '1.41'
MINIMUM_DOCKER_API_VERSION = '1.21'
DEFAULT_TIMEOUT_SECONDS = 60
STREAM_HEADER_SIZE_BYTES = 8
CONTAINER_LIMITS_KEYS = [

View File

@ -1,2 +1,2 @@
from .api import ContextAPI
from .context import Context
from .api import ContextAPI

View File

@ -2,14 +2,11 @@ import json
import os
from docker import errors
from .config import (
METAFILE,
get_current_context_name,
get_meta_dir,
write_context_name_to_docker_config,
)
from .context import Context
from docker.context.config import get_meta_dir
from docker.context.config import METAFILE
from docker.context.config import get_current_context_name
from docker.context.config import write_context_name_to_docker_config
from docker.context import Context
class ContextAPI:

View File

@ -1,9 +1,10 @@
import hashlib
import json
import os
import json
import hashlib
from docker import utils
from docker.constants import DEFAULT_UNIX_SOCKET, IS_WINDOWS_PLATFORM
from docker.constants import IS_WINDOWS_PLATFORM
from docker.constants import DEFAULT_UNIX_SOCKET
from docker.utils.config import find_config_file
METAFILE = "meta.json"

View File

@ -1,16 +1,12 @@
import json
import os
import json
from shutil import copyfile, rmtree
from docker.errors import ContextException
from docker.tls import TLSConfig
from .config import (
get_context_host,
get_meta_dir,
get_meta_file,
get_tls_dir,
)
from docker.errors import ContextException
from docker.context.config import get_meta_dir
from docker.context.config import get_meta_file
from docker.context.config import get_tls_dir
from docker.context.config import get_context_host
class Context:

View File

@ -1,8 +1,8 @@
from .store import Store
from .errors import StoreError, CredentialsNotFound
from .constants import (
DEFAULT_LINUX_STORE,
DEFAULT_OSX_STORE,
DEFAULT_WIN32_STORE,
PROGRAM_PREFIX,
)
from .errors import CredentialsNotFound, StoreError
from .store import Store

View File

@ -4,7 +4,8 @@ import shutil
import subprocess
import warnings
from . import constants, errors
from . import constants
from . import errors
from .utils import create_environment_dict

View File

@ -1,5 +1,5 @@
from ..api import APIClient
from .resource import Collection, Model
from .resource import Model, Collection
class Config(Model):
@ -30,7 +30,6 @@ class ConfigCollection(Collection):
def create(self, **kwargs):
obj = self.client.api.create_config(**kwargs)
obj.setdefault("Spec", {})["Name"] = kwargs.get("name")
return self.prepare_model(obj)
create.__doc__ = APIClient.create_config.__doc__

View File

@ -2,19 +2,16 @@ import copy
import ntpath
from collections import namedtuple
from .images import Image
from .resource import Collection, Model
from ..api import APIClient
from ..constants import DEFAULT_DATA_CHUNK_SIZE
from ..errors import (
ContainerError,
DockerException,
ImageNotFound,
NotFound,
create_unexpected_kwargs_error,
ContainerError, DockerException, ImageNotFound,
NotFound, create_unexpected_kwargs_error
)
from ..types import HostConfig, NetworkingConfig
from ..utils import version_gte
from .images import Image
from .resource import Collection, Model
class Container(Model):
@ -181,8 +178,7 @@ class Container(Model):
user (str): User to execute command as. Default: root
detach (bool): If true, detach from the exec command.
Default: False
stream (bool): Stream response data. Ignored if ``detach`` is true.
Default: False
stream (bool): Stream response data. Default: False
socket (bool): Return the connection socket to allow custom
read/write operations. Default: False
environment (dict or list): A dictionary or a list of strings in
@ -314,7 +310,7 @@ class Container(Model):
float (in nanoseconds)
Returns:
(generator of bytes or bytes): Logs from the container.
(generator or str): Logs from the container.
Raises:
:py:class:`docker.errors.APIError`
@ -907,9 +903,9 @@ class ContainerCollection(Collection):
container, exit_status, command, image, out
)
if stream or out is None:
return out
return b''.join(out)
return out if stream or out is None else b''.join(
[line for line in out]
)
def create(self, image, command=None, **kwargs):
"""

View File

@ -51,7 +51,7 @@ class Image(Model):
Show the history of an image.
Returns:
(list): The history of the image.
(str): The history of the image.
Raises:
:py:class:`docker.errors.APIError`
@ -407,8 +407,8 @@ class ImageCollection(Collection):
if match:
image_id = match.group(2)
images.append(image_id)
if 'errorDetail' in chunk:
raise ImageLoadError(chunk['errorDetail']['message'])
if 'error' in chunk:
raise ImageLoadError(chunk['error'])
return [self.get(i) for i in images]

View File

@ -1,7 +1,7 @@
from ..api import APIClient
from ..utils import version_gte
from .containers import Container
from .resource import Collection, Model
from .resource import Model, Collection
class Network(Model):

View File

@ -1,4 +1,4 @@
from .resource import Collection, Model
from .resource import Model, Collection
class Node(Model):

View File

@ -1,5 +1,5 @@
from ..api import APIClient
from .resource import Collection, Model
from .resource import Model, Collection
class Secret(Model):

View File

@ -1,9 +1,7 @@
import copy
from docker.errors import InvalidArgument, create_unexpected_kwargs_error
from docker.types import ContainerSpec, Placement, ServiceMode, TaskTemplate
from .resource import Collection, Model
from docker.errors import create_unexpected_kwargs_error, InvalidArgument
from docker.types import TaskTemplate, ContainerSpec, Placement, ServiceMode
from .resource import Model, Collection
class Service(Model):

View File

@ -1,6 +1,5 @@
from docker.api import APIClient
from docker.errors import APIError
from .resource import Model

View File

@ -1,5 +1,5 @@
from ..api import APIClient
from .resource import Collection, Model
from .resource import Model, Collection
class Volume(Model):

View File

@ -1,5 +1,4 @@
from .unixconn import UnixHTTPAdapter
try:
from .npipeconn import NpipeHTTPAdapter
from .npipesocket import NpipeSocket

View File

@ -6,8 +6,3 @@ class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
super().close()
if hasattr(self, 'pools'):
self.pools.clear()
# Fix for requests 2.32.2+:
# https://github.com/psf/requests/commit/c98e4d133ef29c46a9b68cd783087218a8075e05
def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
return self.get_connection(request.url, proxies)

View File

@ -1,13 +1,13 @@
import queue
import requests.adapters
from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
from .npipesocket import NpipeSocket
import urllib3
import urllib3.connection
from .. import constants
from .basehttpadapter import BaseHTTPAdapter
from .npipesocket import NpipeSocket
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer

View File

@ -1,12 +1,12 @@
import functools
import io
import time
import io
import pywintypes
import win32api
import win32event
import win32file
import win32pipe
import pywintypes
import win32event
import win32api
cERROR_PIPE_BUSY = 0xe7
cSECURITY_SQOS_PRESENT = 0x100000

View File

@ -1,19 +1,19 @@
import paramiko
import queue
import urllib.parse
import requests.adapters
import logging
import os
import queue
import signal
import socket
import subprocess
import urllib.parse
import paramiko
import requests.adapters
from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
import urllib3
import urllib3.connection
from .. import constants
from .basehttpadapter import BaseHTTPAdapter
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer

View File

@ -1,11 +1,12 @@
import requests.adapters
import socket
import requests.adapters
from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
import urllib3
import urllib3.connection
from .. import constants
from .basehttpadapter import BaseHTTPAdapter
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer

View File

@ -1,24 +1,13 @@
from .containers import ContainerConfig, DeviceRequest, HostConfig, LogConfig, Ulimit
from .containers import (
ContainerConfig, HostConfig, LogConfig, Ulimit, DeviceRequest
)
from .daemon import CancellableStream
from .healthcheck import Healthcheck
from .networks import EndpointConfig, IPAMConfig, IPAMPool, NetworkingConfig
from .services import (
ConfigReference,
ContainerSpec,
DNSConfig,
DriverConfig,
EndpointSpec,
Mount,
NetworkAttachmentConfig,
Placement,
PlacementPreference,
Privileges,
Resources,
RestartPolicy,
RollbackConfig,
SecretReference,
ServiceMode,
TaskTemplate,
UpdateConfig,
ConfigReference, ContainerSpec, DNSConfig, DriverConfig, EndpointSpec,
Mount, Placement, PlacementPreference, Privileges, Resources,
RestartPolicy, RollbackConfig, SecretReference, ServiceMode, TaskTemplate,
UpdateConfig, NetworkAttachmentConfig
)
from .swarm import SwarmExternalCA, SwarmSpec
from .swarm import SwarmSpec, SwarmExternalCA

View File

@ -1,16 +1,8 @@
from .. import errors
from ..utils.utils import (
convert_port_bindings,
convert_tmpfs_mounts,
convert_volume_binds,
format_environment,
format_extra_hosts,
normalize_links,
parse_bytes,
parse_devices,
split_command,
version_gte,
version_lt,
convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds,
format_environment, format_extra_hosts, normalize_links, parse_bytes,
parse_devices, split_command, version_gte, version_lt,
)
from .base import DictType
from .healthcheck import Healthcheck

View File

@ -1,12 +1,8 @@
from .. import errors
from ..constants import IS_WINDOWS_PLATFORM
from ..utils import (
check_resource,
convert_service_networks,
format_environment,
format_extra_hosts,
parse_bytes,
split_command,
check_resource, format_environment, format_extra_hosts, parse_bytes,
split_command, convert_service_networks,
)
@ -242,7 +238,6 @@ class Mount(dict):
for the ``volume`` type.
driver_config (DriverConfig): Volume driver configuration. Only valid
for the ``volume`` type.
subpath (str): Path inside a volume to mount instead of the volume root.
tmpfs_size (int or string): The size for the tmpfs mount in bytes.
tmpfs_mode (int): The permission mode for the tmpfs mount.
"""
@ -250,7 +245,7 @@ class Mount(dict):
def __init__(self, target, source, type='volume', read_only=False,
consistency=None, propagation=None, no_copy=False,
labels=None, driver_config=None, tmpfs_size=None,
tmpfs_mode=None, subpath=None):
tmpfs_mode=None):
self['Target'] = target
self['Source'] = source
if type not in ('bind', 'volume', 'tmpfs', 'npipe'):
@ -268,7 +263,7 @@ class Mount(dict):
self['BindOptions'] = {
'Propagation': propagation
}
if any([labels, driver_config, no_copy, tmpfs_size, tmpfs_mode, subpath]):
if any([labels, driver_config, no_copy, tmpfs_size, tmpfs_mode]):
raise errors.InvalidArgument(
'Incompatible options have been provided for the bind '
'type mount.'
@ -281,8 +276,6 @@ class Mount(dict):
volume_opts['Labels'] = labels
if driver_config:
volume_opts['DriverConfig'] = driver_config
if subpath:
volume_opts['Subpath'] = subpath
if volume_opts:
self['VolumeOptions'] = volume_opts
if any([propagation, tmpfs_size, tmpfs_mode]):
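
A brief sketch of the `subpath` option that main adds for volume mounts (Docker API v1.45+); the volume name and subdirectory are placeholders:

```python
import docker
from docker.types import Mount

client = docker.from_env()

# Mount only the "config" subdirectory of the named volume, read-only,
# instead of the volume root.
mount = Mount(
    target="/etc/app",
    source="app-data",     # placeholder volume name
    type="volume",
    read_only=True,
    subpath="config",      # placeholder subdirectory inside the volume
)

container = client.containers.create(
    "alpine", ["cat", "/etc/app/settings.ini"], mounts=[mount]
)
```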

View File

@ -1,28 +1,13 @@
from .build import create_archive, exclude_paths, match_tag, mkbuildcontext, tar
from .build import create_archive, exclude_paths, mkbuildcontext, tar
from .decorators import check_resource, minimum_version, update_headers
from .utils import (
compare_version,
convert_filters,
convert_port_bindings,
convert_service_networks,
convert_volume_binds,
create_host_config,
create_ipam_config,
create_ipam_pool,
datetime_to_timestamp,
decode_json_header,
format_environment,
format_extra_hosts,
kwargs_from_env,
normalize_links,
parse_bytes,
parse_devices,
parse_env_file,
parse_host,
parse_repository_tag,
split_command,
version_gte,
version_lt,
compare_version, convert_port_bindings, convert_volume_binds,
parse_repository_tag, parse_host,
kwargs_from_env, convert_filters, datetime_to_timestamp,
create_host_config, parse_bytes, parse_env_file, version_lt,
version_gte, decode_json_header, split_command, create_ipam_config,
create_ipam_pool, parse_devices, normalize_links, convert_service_networks,
format_environment, format_extra_hosts
)

View File

@ -4,19 +4,11 @@ import re
import tarfile
import tempfile
from ..constants import IS_WINDOWS_PLATFORM
from .fnmatch import fnmatch
from ..constants import IS_WINDOWS_PLATFORM
_SEP = re.compile('/|\\\\') if IS_WINDOWS_PLATFORM else re.compile('/')
_TAG = re.compile(
r"^[a-z0-9]+((\.|_|__|-+)[a-z0-9]+)*"
r"(?::[0-9]+)?(/[a-z0-9]+((\.|_|__|-+)[a-z0-9]+)*)*"
r"(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127})?$"
)
def match_tag(tag: str) -> bool:
return bool(_TAG.match(tag))
def tar(path, exclude=None, dockerfile=None, fileobj=None, gzip=False):
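
A small sketch of how the tag validation above behaves (the references are illustrative):

```python
from docker.utils import match_tag

# Repository references with an optional registry:port and an optional tag
# are accepted; a URL with a scheme is rejected before the build is sent.
assert match_tag("myimage")
assert match_tag("registry.example.com:5000/team/app:1.0")
assert not match_tag("https://example.com")
```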

View File

@ -3,6 +3,7 @@ import json.decoder
from ..errors import StreamParseError
json_decoder = json.JSONDecoder()

View File

@ -64,7 +64,7 @@ def read_exactly(socket, n):
Reads exactly n bytes from socket
Raises SocketError if there isn't enough data
"""
data = b""
data = bytes()
while len(data) < n:
next_data = read(socket, n - len(data))
if not next_data:
@ -152,7 +152,7 @@ def consume_socket_output(frames, demux=False):
if demux is False:
# If the streams are multiplexed, the generator returns strings, that
# we just need to concatenate.
return b"".join(frames)
return bytes().join(frames)
# If the streams are demultiplexed, the generator yields tuples
# (stdout, stderr)
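
A quick sketch of the two shapes handled here, assuming the usual frame layout (multiplexed frames are joined into one bytes object, demultiplexed frames are joined per stream):

```python
from docker.utils.socket import consume_socket_output

# Multiplexed: frames are plain bytes chunks and are simply concatenated.
frames = [b"hello ", b"world\n"]
assert consume_socket_output(frames, demux=False) == b"hello world\n"

# Demultiplexed: frames are (stdout, stderr) tuples, joined per stream.
demuxed = [(b"out1", None), (None, b"err1"), (b"out2", None)]
stdout, stderr = consume_socket_output(demuxed, demux=True)
assert stdout == b"out1out2" and stderr == b"err1"
```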

View File

@ -5,20 +5,18 @@ import os
import os.path
import shlex
import string
from datetime import datetime, timezone
from functools import lru_cache
from itertools import zip_longest
from urllib.parse import urlparse, urlunparse
from datetime import datetime
from packaging.version import Version
from .. import errors
from ..constants import (
BYTE_UNITS,
DEFAULT_HTTP_HOST,
DEFAULT_NPIPE,
DEFAULT_UNIX_SOCKET,
)
from ..constants import DEFAULT_HTTP_HOST
from ..constants import DEFAULT_UNIX_SOCKET
from ..constants import DEFAULT_NPIPE
from ..constants import BYTE_UNITS
from ..tls import TLSConfig
from urllib.parse import urlparse, urlunparse
URLComponents = collections.namedtuple(
'URLComponents',
'scheme netloc url params query fragment',
@ -45,7 +43,6 @@ def decode_json_header(header):
return json.loads(data)
@lru_cache(maxsize=None)
def compare_version(v1, v2):
"""Compare docker versions
@ -58,20 +55,14 @@ def compare_version(v1, v2):
>>> compare_version(v2, v2)
0
"""
if v1 == v2:
s1 = Version(v1)
s2 = Version(v2)
if s1 == s2:
return 0
# Split into `sys.version_info` like tuples.
s1 = tuple(int(p) for p in v1.split('.'))
s2 = tuple(int(p) for p in v2.split('.'))
# Compare each component, padding with 0 if necessary.
for c1, c2 in zip_longest(s1, s2, fillvalue=0):
if c1 == c2:
continue
elif c1 > c2:
return -1
else:
return 1
return 0
elif s1 > s2:
return -1
else:
return 1
def version_lt(v1, v2):
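
A standalone sketch of the tuple-based comparison that main uses above, kept runnable outside the library; the return convention matches the docstring (0 when equal, 1 when v1 is older, -1 when v1 is newer):

```python
from itertools import zip_longest

def compare_version(v1: str, v2: str) -> int:
    # Split into `sys.version_info`-like integer tuples and compare component
    # by component, padding the shorter version with zeros.
    if v1 == v2:
        return 0
    s1 = tuple(int(p) for p in v1.split("."))
    s2 = tuple(int(p) for p in v2.split("."))
    for c1, c2 in zip_longest(s1, s2, fillvalue=0):
        if c1 == c2:
            continue
        return -1 if c1 > c2 else 1
    return 0

assert compare_version("1.41", "1.45") == 1    # v1 is older
assert compare_version("1.45", "1.41") == -1   # v1 is newer
assert compare_version("1.45", "1.45") == 0
```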
@ -161,7 +152,7 @@ def convert_volume_binds(binds):
]
if 'propagation' in v and v['propagation'] in propagation_modes:
if mode:
mode = f"{mode},{v['propagation']}"
mode = ','.join([mode, v['propagation']])
else:
mode = v['propagation']
@ -403,8 +394,8 @@ def convert_filters(filters):
def datetime_to_timestamp(dt):
"""Convert a datetime to a Unix timestamp"""
delta = dt.astimezone(timezone.utc) - datetime(1970, 1, 1, tzinfo=timezone.utc)
"""Convert a UTC datetime to a Unix timestamp"""
delta = dt - datetime.utcfromtimestamp(0)
return delta.seconds + delta.days * 24 * 3600

View File

@ -1,7 +1,7 @@
try:
from ._version import __version__
except ImportError:
from importlib.metadata import PackageNotFoundError, version
from importlib.metadata import version, PackageNotFoundError
try:
__version__ = version('docker')
except PackageNotFoundError:

docs-requirements.txt (new file, 2 lines)
View File

@ -0,0 +1,2 @@
myst-parser==0.18.0
Sphinx==5.1.1

View File

@ -1,27 +1,6 @@
Changelog
==========
7.1.0
-----
### Upgrade Notes
- Bumped minimum engine API version to 1.24
- Bumped default engine API version to 1.44 (Moby 25.0)
### Bugfixes
- Fixed issue with tag parsing when the registry address includes ports that resulted in `invalid tag format` errors
- Fixed issue preventing creating new configs (`ConfigCollection`), which failed with a `KeyError` due to the `name` field
- Fixed an issue due to an update in the [requests](https://github.com/psf/requests) package breaking `docker-py` by applying the [suggested fix](https://github.com/psf/requests/pull/6710)
### Miscellaneous
- Documentation improvements
- Updated Ruff (linter) and fixed minor linting issues
- Packaging/CI updates
- Started using hatch for packaging (https://github.com/pypa/hatch)
- Updated `setup-python` github action
- Updated tests
- Stopped checking for deprecated container and image related fields (`Container` and `ContainerConfig`)
- Updated tests that check `NetworkSettings.Networks.<network>.Aliases` due to engine changes
7.0.0
-----
### Upgrade Notes
@ -41,11 +20,9 @@ Changelog
- Add `health()` property to container that returns status (e.g. `unhealthy`)
- Add `pause` option to `container.commit()`
- Add support for bind mount propagation (e.g. `rshared`, `private`)
- Add `filters`, `keep_storage`, and `all` parameters to `prune_builds()` (requires API v1.39+)
### Bugfixes
- Consistently return `docker.errors.NotFound` on 404 responses
- Validate tag format before image push
### Miscellaneous
- Upgraded urllib3 version in `requirements.txt` (used for development/tests)

View File

@ -19,7 +19,6 @@ import datetime
import os
import sys
from importlib.metadata import version
sys.path.insert(0, os.path.abspath('..'))

View File

@ -1,100 +1,18 @@
[build-system]
requires = ["hatchling", "hatch-vcs"]
build-backend = "hatchling.build"
requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"]
[project]
name = "docker"
dynamic = ["version"]
description = "A Python library for the Docker Engine API."
readme = "README.md"
license = "Apache-2.0"
requires-python = ">=3.8"
maintainers = [
{ name = "Docker Inc.", email = "no-reply@docker.com" },
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Software Development",
"Topic :: Utilities",
]
dependencies = [
"requests >= 2.26.0",
"urllib3 >= 1.26.0",
"pywin32>=304; sys_platform == \"win32\"",
]
[project.optional-dependencies]
# ssh feature allows DOCKER_HOST=ssh://... style connections
ssh = [
"paramiko>=2.4.3",
]
# tls is always supported, the feature is a no-op for backwards compatibility
tls = []
# websockets can be used as an alternate container attach mechanism but
# by default docker-py hijacks the TCP connection and does not use Websockets
# unless attach_socket(container, ws=True) is called
websockets = [
"websocket-client >= 1.3.0",
]
# docs are dependencies required to build the ReadTheDocs site
# this is only needed for CI / working on the docs!
docs = [
"myst-parser==0.18.0",
"Sphinx==5.1.1",
]
# dev are dependencies required to test & lint this project
# this is only needed if you are making code changes to docker-py!
dev = [
"coverage==7.2.7",
"pytest==7.4.2",
"pytest-cov==4.1.0",
"pytest-timeout==2.1.0",
"ruff==0.1.8",
]
[project.urls]
Changelog = "https://docker-py.readthedocs.io/en/stable/change-log.html"
Documentation = "https://docker-py.readthedocs.io"
Homepage = "https://github.com/docker/docker-py"
Source = "https://github.com/docker/docker-py"
Tracker = "https://github.com/docker/docker-py/issues"
[tool.hatch.version]
source = "vcs"
[tool.hatch.build.hooks.vcs]
version-file = "docker/_version.py"
[tool.hatch.build.targets.sdist]
include = [
"/docker",
]
[tool.setuptools_scm]
write_to = 'docker/_version.py'
[tool.ruff]
target-version = "py38"
target-version = "py37"
extend-select = [
"B",
"C",
"F",
"I",
"UP",
"W",
]
ignore = [
"UP012", # unnecessary `UTF-8` argument (we want to be explicit)
"C901", # too complex (there's a whole bunch of these)
]

requirements.txt (new file, 6 lines)
View File

@ -0,0 +1,6 @@
packaging==21.3
paramiko==2.11.0
pywin32==304; sys_platform == 'win32'
requests==2.31.0
urllib3==1.26.18
websocket-client==1.3.3

setup.cfg (new file, 3 lines)
View File

@ -0,0 +1,3 @@
[metadata]
description_file = README.rst
license = Apache License 2.0

setup.py (new file, 83 lines)
View File

@ -0,0 +1,83 @@
#!/usr/bin/env python
import codecs
import os
from setuptools import find_packages
from setuptools import setup
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
requirements = [
'packaging >= 14.0',
'requests >= 2.26.0',
'urllib3 >= 1.26.0',
]
extras_require = {
# win32 APIs if on Windows (required for npipe support)
':sys_platform == "win32"': 'pywin32>=304',
# This is now a no-op, as similarly the requests[security] extra is
# a no-op as of requests 2.26.0, this is always available/by default now
# see https://github.com/psf/requests/pull/5867
'tls': [],
# Only required when connecting using the ssh:// protocol
'ssh': ['paramiko>=2.4.3'],
# Only required when using websockets
'websockets': ['websocket-client >= 1.3.0'],
}
with open('./test-requirements.txt') as test_reqs_txt:
test_requirements = list(test_reqs_txt)
long_description = ''
with codecs.open('./README.md', encoding='utf-8') as readme_md:
long_description = readme_md.read()
setup(
name="docker",
use_scm_version={
'write_to': 'docker/_version.py'
},
description="A Python library for the Docker Engine API.",
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/docker/docker-py',
project_urls={
'Documentation': 'https://docker-py.readthedocs.io',
'Changelog': 'https://docker-py.readthedocs.io/en/stable/change-log.html',
'Source': 'https://github.com/docker/docker-py',
'Tracker': 'https://github.com/docker/docker-py/issues',
},
packages=find_packages(exclude=["tests.*", "tests"]),
setup_requires=['setuptools_scm'],
install_requires=requirements,
tests_require=test_requirements,
extras_require=extras_require,
python_requires='>=3.8',
zip_safe=False,
test_suite='tests',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: 3.12',
'Topic :: Software Development',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License',
],
maintainer='Ulysses Souza',
maintainer_email='ulysses.souza@docker.com',
)

test-requirements.txt (new file, 6 lines)
View File

@ -0,0 +1,6 @@
setuptools==65.5.1
coverage==7.2.7
ruff==0.0.284
pytest==7.4.2
pytest-cov==4.1.0
pytest-timeout==2.1.0

View File

@ -1,6 +1,7 @@
# syntax=docker/dockerfile:1
ARG PYTHON_VERSION=3.12
FROM python:${PYTHON_VERSION}
RUN apt-get update && apt-get -y install --no-install-recommends \
@ -26,10 +27,16 @@ RUN curl -sSL -o /opt/docker-credential-pass.tar.gz \
chmod +x /usr/local/bin/docker-credential-pass
WORKDIR /src
COPY . .
ARG VERSION=0.0.0.dev0
RUN --mount=type=cache,target=/cache/pip \
PIP_CACHE_DIR=/cache/pip \
SETUPTOOLS_SCM_PRETEND_VERSION=${VERSION} \
pip install .[dev,ssh,websockets]
COPY requirements.txt /src/requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
pip install -r requirements.txt
COPY test-requirements.txt /src/test-requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
pip install -r test-requirements.txt
COPY . /src
ARG SETUPTOOLS_SCM_PRETEND_VERSION=99.0.0+docker
RUN --mount=type=cache,target=/root/.cache/pip \
pip install -e .

View File

@ -1,7 +1,7 @@
# syntax=docker/dockerfile:1
ARG API_VERSION=1.45
ARG ENGINE_VERSION=26.1
ARG API_VERSION=1.41
ARG ENGINE_VERSION=20.10
FROM docker:${ENGINE_VERSION}-dind

View File

@ -8,11 +8,10 @@ import tarfile
import tempfile
import time
import docker
import paramiko
import pytest
import docker
def make_tree(dirs, files):
base = tempfile.mkdtemp()

View File

@ -3,13 +3,13 @@ import os
import shutil
import tempfile
import pytest
from docker import errors
from docker.utils.proxy import ProxyConfig
import pytest
from .base import BaseAPIIntegrationTest, TEST_IMG
from ..helpers import random_name, requires_api_version, requires_experimental
from .base import TEST_IMG, BaseAPIIntegrationTest
class BuildTest(BaseAPIIntegrationTest):
@ -275,7 +275,7 @@ class BuildTest(BaseAPIIntegrationTest):
pass
info = self.client.inspect_image('build1')
assert 'OnBuild' not in info['Config'] or not info['Config']['OnBuild']
assert not info['Config']['OnBuild']
@requires_api_version('1.25')
def test_build_with_network_mode(self):
@ -389,7 +389,9 @@ class BuildTest(BaseAPIIntegrationTest):
lines = []
for chunk in stream:
lines.append(chunk.get('stream'))
expected = f'{control_chars[0]}{snippet}\n{control_chars[1]}'
expected = '{0}{2}\n{1}'.format(
control_chars[0], control_chars[1], snippet
)
assert any(line == expected for line in lines)
def test_build_gzip_encoding(self):

View File

@ -1,6 +1,5 @@
import pytest
import docker
import pytest
from ..helpers import force_leave_swarm, requires_api_version
from .base import BaseAPIIntegrationTest

View File

@ -9,17 +9,15 @@ import pytest
import requests
import docker
from docker.constants import IS_WINDOWS_PLATFORM
from docker.utils.socket import next_frame_header, read_exactly
from .. import helpers
from ..helpers import (
assert_cat_socket_detached_with_keys,
ctrl_with,
requires_api_version,
skip_if_desktop,
)
from .base import TEST_IMG, BaseAPIIntegrationTest
from ..helpers import assert_cat_socket_detached_with_keys
from ..helpers import ctrl_with
from ..helpers import requires_api_version, skip_if_desktop
from .base import BaseAPIIntegrationTest
from .base import TEST_IMG
from docker.constants import IS_WINDOWS_PLATFORM
from docker.utils.socket import next_frame_header
from docker.utils.socket import read_exactly
class ListContainersTest(BaseAPIIntegrationTest):
@ -620,56 +618,6 @@ class VolumeBindTest(BaseAPIIntegrationTest):
assert mount['Source'] == mount_data['Name']
assert mount_data['RW'] is True
@requires_api_version('1.45')
def test_create_with_subpath_volume_mount(self):
source_volume = helpers.random_name()
self.client.create_volume(name=source_volume)
setup_container = None
test_container = None
# Create a file structure in the volume to test with
setup_container = self.client.create_container(
TEST_IMG,
[
"sh",
"-c",
'mkdir -p /vol/subdir && echo "test content" > /vol/subdir/testfile.txt',
],
host_config=self.client.create_host_config(
binds=[f"{source_volume}:/vol"]
),
)
self.client.start(setup_container)
self.client.wait(setup_container)
# Now test with subpath
mount = docker.types.Mount(
type="volume",
source=source_volume,
target=self.mount_dest,
read_only=True,
subpath="subdir",
)
host_config = self.client.create_host_config(mounts=[mount])
test_container = self.client.create_container(
TEST_IMG,
["cat", os.path.join(self.mount_dest, "testfile.txt")],
host_config=host_config,
)
self.client.start(test_container)
self.client.wait(test_container) # Wait for container to finish
output = self.client.logs(test_container).decode("utf-8").strip()
# If the subpath feature is working, we should be able to see the content
# of the file in the subdir
assert output == "test content"
def check_container_data(self, inspect_data, rw, propagation='rprivate'):
assert 'Mounts' in inspect_data
filtered = list(filter(

View File

@ -1,12 +1,11 @@
from ..helpers import assert_cat_socket_detached_with_keys
from ..helpers import ctrl_with
from ..helpers import requires_api_version
from .base import BaseAPIIntegrationTest
from .base import TEST_IMG
from docker.utils.proxy import ProxyConfig
from docker.utils.socket import next_frame_header, read_exactly
from ..helpers import (
assert_cat_socket_detached_with_keys,
ctrl_with,
requires_api_version,
)
from .base import TEST_IMG, BaseAPIIntegrationTest
from docker.utils.socket import next_frame_header
from docker.utils.socket import read_exactly
class ExecTest(BaseAPIIntegrationTest):

View File

@ -1,5 +1,5 @@
from .base import BaseAPIIntegrationTest, TEST_IMG
from .. import helpers
from .base import TEST_IMG, BaseAPIIntegrationTest
SECOND = 1000000000

View File

@ -2,18 +2,19 @@ import contextlib
import json
import shutil
import socket
import socketserver
import tarfile
import tempfile
import threading
from http.server import SimpleHTTPRequestHandler
import pytest
from http.server import SimpleHTTPRequestHandler
import socketserver
import docker
from ..helpers import requires_api_version, requires_experimental
from .base import TEST_IMG, BaseAPIIntegrationTest
from .base import BaseAPIIntegrationTest, TEST_IMG
class ListImagesTest(BaseAPIIntegrationTest):
@ -84,8 +85,13 @@ class CommitTest(BaseAPIIntegrationTest):
img_id = res['Id']
self.tmp_imgs.append(img_id)
img = self.client.inspect_image(img_id)
assert 'Parent' in img
assert 'Container' in img
assert img['Container'].startswith(id)
assert 'ContainerConfig' in img
assert 'Image' in img['ContainerConfig']
assert TEST_IMG == img['ContainerConfig']['Image']
busybox_id = self.client.inspect_image(TEST_IMG)['Id']
assert 'Parent' in img
assert img['Parent'] == busybox_id
def test_commit_with_changes(self):
@ -97,6 +103,8 @@ class CommitTest(BaseAPIIntegrationTest):
)
self.tmp_imgs.append(img_id)
img = self.client.inspect_image(img_id)
assert 'Container' in img
assert img['Container'].startswith(cid['Id'])
assert '8000/tcp' in img['Config']['ExposedPorts']
assert img['Config']['Cmd'] == ['bash']

View File

@ -1,10 +1,9 @@
import pytest
import docker
from docker.types import IPAMConfig, IPAMPool
import pytest
from ..helpers import random_name, requires_api_version
from .base import TEST_IMG, BaseAPIIntegrationTest
from .base import BaseAPIIntegrationTest, TEST_IMG
class TestNetworks(BaseAPIIntegrationTest):

View File

@ -1,11 +1,10 @@
import os
import docker
import pytest
import docker
from ..helpers import requires_api_version
from .base import BaseAPIIntegrationTest
from ..helpers import requires_api_version
SSHFS = 'vieux/sshfs:latest'

View File

@ -1,6 +1,5 @@
import pytest
import docker
import pytest
from ..helpers import force_leave_swarm, requires_api_version
from .base import BaseAPIIntegrationTest

View File

@ -1,12 +1,13 @@
import random
import time
import docker
import pytest
import docker
from ..helpers import force_leave_swarm, requires_api_version
from .base import TEST_IMG, BaseAPIIntegrationTest
from ..helpers import (
force_leave_swarm, requires_api_version, requires_experimental
)
from .base import BaseAPIIntegrationTest, TEST_IMG
class ServiceTest(BaseAPIIntegrationTest):
@ -140,7 +141,8 @@ class ServiceTest(BaseAPIIntegrationTest):
assert len(services) == 1
assert services[0]['ID'] == svc_id['ID']
@requires_api_version('1.29')
@requires_api_version('1.25')
@requires_experimental(until='1.29')
def test_service_logs(self):
name, svc_id = self.create_simple_service()
assert self.get_service_container(name, include_stopped=True)

View File

@ -1,8 +1,6 @@
import copy
import pytest
import docker
import pytest
from ..helpers import force_leave_swarm, requires_api_version
from .base import BaseAPIIntegrationTest

View File

@ -1,6 +1,5 @@
import pytest
import docker
import pytest
from ..helpers import requires_api_version
from .base import BaseAPIIntegrationTest
@ -17,16 +16,10 @@ class TestVolumes(BaseAPIIntegrationTest):
assert result['Driver'] == 'local'
def test_create_volume_invalid_driver(self):
# special name to avoid exponential timeout loop
# https://github.com/moby/moby/blob/9e00a63d65434cdedc444e79a2b33a7c202b10d8/pkg/plugins/client.go#L253-L254
driver_name = 'this-plugin-does-not-exist'
driver_name = 'invalid.driver'
with pytest.raises(docker.errors.APIError) as cm:
with pytest.raises(docker.errors.NotFound):
self.client.create_volume('perfectcherryblossom', driver_name)
assert (
cm.value.response.status_code == 404 or
cm.value.response.status_code == 400
)
def test_list_volumes(self):
name = 'imperishablenight'

View File

@ -3,9 +3,8 @@ import shutil
import unittest
import docker
from docker.utils import kwargs_from_env
from .. import helpers
from docker.utils import kwargs_from_env
TEST_IMG = 'alpine:3.10'
TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')

View File

@ -1,9 +1,10 @@
import threading
import unittest
from datetime import datetime, timedelta
import docker
from datetime import datetime, timedelta
from ..helpers import requires_api_version
from .base import TEST_API_VERSION

View File

@ -1,10 +1,9 @@
import sys
import warnings
import pytest
import docker.errors
from docker.utils import kwargs_from_env
import pytest
from .base import TEST_IMG

View File

@ -1,12 +1,9 @@
import os
import tempfile
import pytest
from docker import errors
from docker.context import ContextAPI
from docker.tls import TLSConfig
from .base import BaseAPIIntegrationTest

View File

@ -6,11 +6,8 @@ import sys
import pytest
from docker.credentials import (
DEFAULT_LINUX_STORE,
DEFAULT_OSX_STORE,
CredentialsNotFound,
Store,
StoreError,
CredentialsNotFound, Store, StoreError, DEFAULT_LINUX_STORE,
DEFAULT_OSX_STORE
)

View File

@ -1,7 +1,7 @@
import os
from unittest import mock
from docker.credentials.utils import create_environment_dict
from unittest import mock
@mock.patch.dict(os.environ)

View File

@ -1,8 +1,6 @@
import pytest
from docker.errors import APIError
from .base import TEST_IMG, BaseAPIIntegrationTest
from .base import BaseAPIIntegrationTest, TEST_IMG
import pytest
class ErrorsTest(BaseAPIIntegrationTest):

View File

@ -5,9 +5,10 @@ import threading
import pytest
import docker
from ..helpers import random_name, requires_api_version
from .base import TEST_API_VERSION, BaseIntegrationTest
from .base import BaseIntegrationTest
from .base import TEST_API_VERSION
from ..helpers import random_name
from ..helpers import requires_api_version
class ContainerCollectionTest(BaseIntegrationTest):
@ -109,12 +110,12 @@ class ContainerCollectionTest(BaseIntegrationTest):
client.networks.create(net_name)
self.tmp_networks.append(net_name)
test_alias = 'hello'
test_aliases = ['hello']
test_driver_opt = {'key1': 'a'}
networking_config = {
net_name: client.api.create_endpoint_config(
aliases=[test_alias],
aliases=test_aliases,
driver_opt=test_driver_opt
)
}
@ -131,9 +132,8 @@ class ContainerCollectionTest(BaseIntegrationTest):
assert 'NetworkSettings' in attrs
assert 'Networks' in attrs['NetworkSettings']
assert list(attrs['NetworkSettings']['Networks'].keys()) == [net_name]
# Aliases no longer include the container's short-id in API v1.45.
assert attrs['NetworkSettings']['Networks'][net_name]['Aliases'] \
== [test_alias]
assert attrs['NetworkSettings']['Networks'][net_name]['Aliases'] == \
test_aliases
assert attrs['NetworkSettings']['Networks'][net_name]['DriverOpts'] \
== test_driver_opt
@ -190,9 +190,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
assert 'NetworkSettings' in attrs
assert 'Networks' in attrs['NetworkSettings']
assert list(attrs['NetworkSettings']['Networks'].keys()) == [net_name]
# Aliases no longer include the container's short-id in API v1.45.
assert (attrs['NetworkSettings']['Networks'][net_name]['Aliases']
is None)
assert attrs['NetworkSettings']['Networks'][net_name]['Aliases'] is None
assert (attrs['NetworkSettings']['Networks'][net_name]['DriverOpts']
is None)
@ -352,26 +350,14 @@ class ContainerTest(BaseIntegrationTest):
assert exec_output[0] == 0
assert exec_output[1] == b"hello\n"
def test_exec_run_error_code_from_exec(self):
client = docker.from_env(version=TEST_API_VERSION)
container = client.containers.run(
"alpine", "sh -c 'sleep 20'", detach=True
)
self.tmp_containers.append(container.id)
exec_output = container.exec_run("sh -c 'exit 42'")
assert exec_output[0] == 42
def test_exec_run_failed(self):
client = docker.from_env(version=TEST_API_VERSION)
container = client.containers.run(
"alpine", "sh -c 'sleep 60'", detach=True
)
self.tmp_containers.append(container.id)
exec_output = container.exec_run("non-existent")
# older versions of docker return `126` in the case that an exec cannot
# be started due to a missing executable. We're fixing this for the
# future, so accept both for now.
assert exec_output[0] == 127 or exec_output[0] == 126
exec_output = container.exec_run("docker ps")
assert exec_output[0] == 126
def test_kill(self):
client = docker.from_env(version=TEST_API_VERSION)

View File

@ -1,12 +1,11 @@
import io
import tempfile
import docker
import pytest
import docker
from .base import BaseIntegrationTest, TEST_IMG, TEST_API_VERSION
from ..helpers import random_name
from .base import TEST_API_VERSION, TEST_IMG, BaseIntegrationTest
class ImageCollectionTest(BaseIntegrationTest):

View File

@ -1,7 +1,6 @@
import docker
from .. import helpers
from .base import TEST_API_VERSION, BaseIntegrationTest
from .base import BaseIntegrationTest, TEST_API_VERSION
class NetworkCollectionTest(BaseIntegrationTest):

View File

@ -1,6 +1,5 @@
import docker
from .base import TEST_API_VERSION, BaseIntegrationTest
from .base import BaseIntegrationTest, TEST_API_VERSION
class ModelTest(BaseIntegrationTest):

View File

@ -1,13 +1,12 @@
import unittest
import pytest
import docker
from docker.errors import InvalidArgument
from docker.types.services import ServiceMode
import pytest
from .. import helpers
from .base import TEST_API_VERSION
from docker.errors import InvalidArgument
from docker.types.services import ServiceMode
class ServiceTest(unittest.TestCase):

View File

@ -1,11 +1,10 @@
import unittest
import pytest
import docker
from .. import helpers
from .base import TEST_API_VERSION
import pytest
class SwarmTest(unittest.TestCase):

View File

@ -1,6 +1,5 @@
import docker
from .base import TEST_API_VERSION, BaseIntegrationTest
from .base import BaseIntegrationTest, TEST_API_VERSION
class VolumesTest(BaseIntegrationTest):

View File

@ -1,11 +1,10 @@
import io
import random
import pytest
import docker
from .base import TEST_IMG, BaseAPIIntegrationTest
from .base import BaseAPIIntegrationTest, TEST_IMG
import pytest
class TestRegressions(BaseAPIIntegrationTest):

View File

@ -3,13 +3,13 @@ import os
import shutil
import tempfile
import pytest
from docker import errors
from docker.utils.proxy import ProxyConfig
import pytest
from .base import BaseAPIIntegrationTest, TEST_IMG
from ..helpers import random_name, requires_api_version, requires_experimental
from .base import TEST_IMG, BaseAPIIntegrationTest
class BuildTest(BaseAPIIntegrationTest):
@ -266,7 +266,7 @@ class BuildTest(BaseAPIIntegrationTest):
pass
info = self.client.inspect_image('build1')
assert 'OnBuild' not in info['Config'] or not info['Config']['OnBuild']
assert not info['Config']['OnBuild']
@requires_api_version('1.25')
def test_build_with_network_mode(self):
@ -380,7 +380,9 @@ class BuildTest(BaseAPIIntegrationTest):
lines = []
for chunk in stream:
lines.append(chunk.get('stream'))
expected = f'{control_chars[0]}{snippet}\n{control_chars[1]}'
expected = '{0}{2}\n{1}'.format(
control_chars[0], control_chars[1], snippet
)
assert any(line == expected for line in lines)
def test_build_gzip_encoding(self):

View File

@ -5,9 +5,8 @@ import unittest
import pytest
import docker
from docker.utils import kwargs_from_env
from .. import helpers
from docker.utils import kwargs_from_env
TEST_IMG = 'alpine:3.10'
TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')

View File

@ -1,11 +1,9 @@
import os
import unittest
import docker
import paramiko.ssh_exception
import pytest
import docker
from .base import TEST_API_VERSION

View File

@ -2,206 +2,181 @@ import gzip
import io
import shutil
import pytest
import docker
from docker import auth, errors
from docker import auth
from docker.api.build import process_dockerfile
import pytest
from ..helpers import make_tree
from .api_test import BaseAPIClientTest, fake_request, url_prefix
class BuildTest(BaseAPIClientTest):
def test_build_container(self):
script = io.BytesIO(
"\n".join(
[
"FROM busybox",
"RUN mkdir -p /tmp/test",
"EXPOSE 8080",
"ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz"
" /tmp/silence.tar.gz",
]
).encode("ascii")
)
script = io.BytesIO('\n'.join([
'FROM busybox',
'RUN mkdir -p /tmp/test',
'EXPOSE 8080',
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
' /tmp/silence.tar.gz'
]).encode('ascii'))
self.client.build(fileobj=script)
def test_build_container_pull(self):
script = io.BytesIO(
"\n".join(
[
"FROM busybox",
"RUN mkdir -p /tmp/test",
"EXPOSE 8080",
"ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz"
" /tmp/silence.tar.gz",
]
).encode("ascii")
)
script = io.BytesIO('\n'.join([
'FROM busybox',
'RUN mkdir -p /tmp/test',
'EXPOSE 8080',
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
' /tmp/silence.tar.gz'
]).encode('ascii'))
self.client.build(fileobj=script, pull=True)
def test_build_container_custom_context(self):
script = io.BytesIO(
"\n".join(
[
"FROM busybox",
"RUN mkdir -p /tmp/test",
"EXPOSE 8080",
"ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz"
" /tmp/silence.tar.gz",
]
).encode("ascii")
)
script = io.BytesIO('\n'.join([
'FROM busybox',
'RUN mkdir -p /tmp/test',
'EXPOSE 8080',
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
' /tmp/silence.tar.gz'
]).encode('ascii'))
context = docker.utils.mkbuildcontext(script)
self.client.build(fileobj=context, custom_context=True)
def test_build_container_custom_context_gzip(self):
script = io.BytesIO(
"\n".join(
[
"FROM busybox",
"RUN mkdir -p /tmp/test",
"EXPOSE 8080",
"ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz"
" /tmp/silence.tar.gz",
]
).encode("ascii")
)
script = io.BytesIO('\n'.join([
'FROM busybox',
'RUN mkdir -p /tmp/test',
'EXPOSE 8080',
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
' /tmp/silence.tar.gz'
]).encode('ascii'))
context = docker.utils.mkbuildcontext(script)
gz_context = gzip.GzipFile(fileobj=context)
self.client.build(fileobj=gz_context, custom_context=True, encoding="gzip")
def test_build_remote_with_registry_auth(self):
self.client._auth_configs = auth.AuthConfig(
{
"auths": {
"https://example.com": {
"user": "example",
"password": "example",
"email": "example@example.com",
}
}
}
self.client.build(
fileobj=gz_context,
custom_context=True,
encoding="gzip"
)
expected_params = {
"t": None,
"q": False,
"dockerfile": None,
"rm": False,
"nocache": False,
"pull": False,
"forcerm": False,
"remote": "https://github.com/docker-library/mongo",
}
def test_build_remote_with_registry_auth(self):
self.client._auth_configs = auth.AuthConfig({
'auths': {
'https://example.com': {
'user': 'example',
'password': 'example',
'email': 'example@example.com'
}
}
})
expected_params = {'t': None, 'q': False, 'dockerfile': None,
'rm': False, 'nocache': False, 'pull': False,
'forcerm': False,
'remote': 'https://github.com/docker-library/mongo'}
expected_headers = {
"X-Registry-Config": auth.encode_header(self.client._auth_configs.auths)
'X-Registry-Config': auth.encode_header(
self.client._auth_configs.auths
)
}
self.client.build(path="https://github.com/docker-library/mongo")
self.client.build(path='https://github.com/docker-library/mongo')
fake_request.assert_called_with(
"POST",
'POST',
f"{url_prefix}build",
stream=True,
data=None,
headers=expected_headers,
params=expected_params,
timeout=None,
timeout=None
)
def test_build_container_with_named_dockerfile(self):
self.client.build(".", dockerfile="nameddockerfile")
def test_build_with_invalid_tag(self):
with pytest.raises(errors.DockerException):
self.client.build(".", tag="https://example.com")
self.client.build('.', dockerfile='nameddockerfile')
def test_build_container_with_container_limits(self):
self.client.build(
".",
container_limits={
"memory": 1024 * 1024,
"cpusetcpus": 1,
"cpushares": 1000,
"memswap": 1024 * 1024 * 8,
},
)
self.client.build('.', container_limits={
'memory': 1024 * 1024,
'cpusetcpus': 1,
'cpushares': 1000,
'memswap': 1024 * 1024 * 8
})
def test_build_container_invalid_container_limits(self):
with pytest.raises(docker.errors.DockerException):
self.client.build(".", container_limits={"foo": "bar"})
self.client.build('.', container_limits={
'foo': 'bar'
})
def test_set_auth_headers_with_empty_dict_and_auth_configs(self):
self.client._auth_configs = auth.AuthConfig(
{
"auths": {
"https://example.com": {
"user": "example",
"password": "example",
"email": "example@example.com",
}
self.client._auth_configs = auth.AuthConfig({
'auths': {
'https://example.com': {
'user': 'example',
'password': 'example',
'email': 'example@example.com'
}
}
)
})
headers = {}
expected_headers = {
"X-Registry-Config": auth.encode_header(self.client._auth_configs.auths)
'X-Registry-Config': auth.encode_header(
self.client._auth_configs.auths
)
}
self.client._set_auth_headers(headers)
assert headers == expected_headers
def test_set_auth_headers_with_dict_and_auth_configs(self):
self.client._auth_configs = auth.AuthConfig(
{
"auths": {
"https://example.com": {
"user": "example",
"password": "example",
"email": "example@example.com",
}
self.client._auth_configs = auth.AuthConfig({
'auths': {
'https://example.com': {
'user': 'example',
'password': 'example',
'email': 'example@example.com'
}
}
)
})
headers = {"foo": "bar"}
headers = {'foo': 'bar'}
expected_headers = {
"X-Registry-Config": auth.encode_header(self.client._auth_configs.auths),
"foo": "bar",
'X-Registry-Config': auth.encode_header(
self.client._auth_configs.auths
),
'foo': 'bar'
}
self.client._set_auth_headers(headers)
assert headers == expected_headers
def test_set_auth_headers_with_dict_and_no_auth_configs(self):
headers = {"foo": "bar"}
expected_headers = {"foo": "bar"}
headers = {'foo': 'bar'}
expected_headers = {
'foo': 'bar'
}
self.client._set_auth_headers(headers)
assert headers == expected_headers
@pytest.mark.skipif(
not docker.constants.IS_WINDOWS_PLATFORM, reason="Windows-specific syntax"
)
not docker.constants.IS_WINDOWS_PLATFORM,
reason='Windows-specific syntax')
def test_process_dockerfile_win_longpath_prefix(self):
dirs = [
"foo",
"foo/bar",
"baz",
'foo', 'foo/bar', 'baz',
]
files = [
"Dockerfile",
"foo/Dockerfile.foo",
"foo/bar/Dockerfile.bar",
"baz/Dockerfile.baz",
'Dockerfile', 'foo/Dockerfile.foo', 'foo/bar/Dockerfile.bar',
'baz/Dockerfile.baz',
]
base = make_tree(dirs, files)
@@ -211,42 +186,40 @@ class BuildTest(BaseAPIClientTest):
return docker.constants.WINDOWS_LONGPATH_PREFIX + path
assert process_dockerfile(None, pre(base)) == (None, None)
assert process_dockerfile("Dockerfile", pre(base)) == ("Dockerfile", None)
assert process_dockerfile("foo/Dockerfile.foo", pre(base)) == (
"foo/Dockerfile.foo",
None,
assert process_dockerfile('Dockerfile', pre(base)) == (
'Dockerfile', None
)
assert process_dockerfile("../Dockerfile", pre(f"{base}\\foo"))[1] is not None
assert process_dockerfile("../baz/Dockerfile.baz", pre(f"{base}/baz")) == (
"../baz/Dockerfile.baz",
None,
assert process_dockerfile('foo/Dockerfile.foo', pre(base)) == (
'foo/Dockerfile.foo', None
)
assert process_dockerfile(
'../Dockerfile', pre(f"{base}\\foo")
)[1] is not None
assert process_dockerfile(
'../baz/Dockerfile.baz', pre(f"{base}/baz")
) == ('../baz/Dockerfile.baz', None)
def test_process_dockerfile(self):
dirs = [
"foo",
"foo/bar",
"baz",
'foo', 'foo/bar', 'baz',
]
files = [
"Dockerfile",
"foo/Dockerfile.foo",
"foo/bar/Dockerfile.bar",
"baz/Dockerfile.baz",
'Dockerfile', 'foo/Dockerfile.foo', 'foo/bar/Dockerfile.bar',
'baz/Dockerfile.baz',
]
base = make_tree(dirs, files)
self.addCleanup(shutil.rmtree, base)
assert process_dockerfile(None, base) == (None, None)
assert process_dockerfile("Dockerfile", base) == ("Dockerfile", None)
assert process_dockerfile("foo/Dockerfile.foo", base) == (
"foo/Dockerfile.foo",
None,
assert process_dockerfile('Dockerfile', base) == ('Dockerfile', None)
assert process_dockerfile('foo/Dockerfile.foo', base) == (
'foo/Dockerfile.foo', None
)
assert process_dockerfile("../Dockerfile", f"{base}/foo")[1] is not None
assert process_dockerfile("../baz/Dockerfile.baz", f"{base}/baz") == (
"../baz/Dockerfile.baz",
None,
assert process_dockerfile(
'../Dockerfile', f"{base}/foo"
)[1] is not None
assert process_dockerfile('../baz/Dockerfile.baz', f"{base}/baz") == (
'../baz/Dockerfile.baz', None
)

View File

@@ -1,22 +1,17 @@
import datetime
import json
import signal
from unittest import mock
import pytest
import docker
from docker.api import APIClient
from unittest import mock
import pytest
from ..helpers import requires_api_version
from . import fake_api
from ..helpers import requires_api_version
from .api_test import (
DEFAULT_TIMEOUT_SECONDS,
BaseAPIClientTest,
fake_inspect_container,
fake_request,
url_base,
url_prefix,
BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS,
fake_inspect_container, url_base
)

View File

@@ -2,10 +2,7 @@ import json
from . import fake_api
from .api_test import (
DEFAULT_TIMEOUT_SECONDS,
BaseAPIClientTest,
fake_request,
url_prefix,
BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS,
)

View File

@@ -1,17 +1,12 @@
from unittest import mock
import docker
import pytest
import docker
from docker import auth
from . import fake_api
from docker import auth
from unittest import mock
from .api_test import (
DEFAULT_TIMEOUT_SECONDS,
BaseAPIClientTest,
fake_request,
fake_resolve_authconfig,
url_prefix,
BaseAPIClientTest, fake_request, DEFAULT_TIMEOUT_SECONDS, url_prefix,
fake_resolve_authconfig
)

View File

@@ -1,9 +1,8 @@
import json
from unittest import mock
from .api_test import BaseAPIClientTest, url_prefix, response
from docker.types import IPAMConfig, IPAMPool
from .api_test import BaseAPIClientTest, response, url_prefix
from unittest import mock
class NetworkTest(BaseAPIClientTest):

View File

@@ -1,29 +1,29 @@
import datetime
import http.server
import io
import json
import os
import re
import shutil
import socket
import socketserver
import struct
import tempfile
import threading
import time
import unittest
from unittest import mock
import socketserver
import http.server
import docker
import pytest
import requests
import urllib3
import docker
from docker.api import APIClient
from docker.constants import DEFAULT_DOCKER_API_VERSION
from unittest import mock
from . import fake_api
DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS
@@ -82,7 +82,7 @@ def fake_delete(self, url, *args, **kwargs):
def fake_read_from_socket(self, response, stream, tty=False, demux=False):
return b''
return bytes()
url_base = f'{fake_api.prefix}/'

View File

@@ -3,7 +3,7 @@ import json
import pytest
from ..helpers import requires_api_version
from .api_test import BaseAPIClientTest, fake_request, url_prefix
from .api_test import BaseAPIClientTest, url_prefix, fake_request
class VolumeTest(BaseAPIClientTest):

View File

@@ -6,11 +6,10 @@ import random
import shutil
import tempfile
import unittest
from unittest import mock
import pytest
from docker import auth, credentials, errors
from unittest import mock
import pytest
class RegressionTest(unittest.TestCase):

Some files were not shown because too many files have changed in this diff