mirror of https://github.com/docker/docker-py.git
Merge branch 'main' into patch-1
commit 617c41d222
@@ -9,3 +9,6 @@ max_line_length = 80
[*.md]
trim_trailing_whitespace = false

[*.{yaml,yml}]
indent_size = 2

@@ -2,26 +2,53 @@ name: Python package

on: [push, pull_request]

env:
DOCKER_BUILDKIT: '1'

jobs:
build:
flake8:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.x'
- run: pip install -U flake8
- name: Run flake8
run: flake8 docker/ tests/

unit-tests:
runs-on: ubuntu-latest
strategy:
max-parallel: 1
matrix:
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11.0-alpha - 3.11.0"]

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python3 -m pip install --upgrade pip
pip3 install -r test-requirements.txt -r requirements.txt
- name: Test with pytest
- name: Run unit tests
run: |
docker logout
rm -rf ~/.docker
py.test -v --cov=docker tests/unit

integration-tests:
runs-on: ubuntu-latest
strategy:
matrix:
variant: [ "integration-dind", "integration-dind-ssl", "integration-dind-ssh" ]

steps:
- uses: actions/checkout@v3
- name: make ${{ matrix.variant }}
run: |
docker logout
rm -rf ~/.docker
make ${{ matrix.variant }}

@@ -0,0 +1,47 @@
name: Release

on:
workflow_dispatch:
inputs:
tag:
description: "Release Tag WITHOUT `v` Prefix (e.g. 6.0.0)"
required: true
dry-run:
description: 'Dry run'
required: false
type: boolean
default: true

jobs:
publish:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3

- uses: actions/setup-python@v4
with:
python-version: '3.10'

- name: Generate Package
run: |
pip3 install wheel
python setup.py sdist bdist_wheel
env:
SETUPTOOLS_SCM_PRETEND_VERSION_FOR_DOCKER: ${{ inputs.tag }}

- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
if: '! inputs.dry-run'
with:
password: ${{ secrets.PYPI_API_TOKEN }}

- name: Create GitHub release
uses: ncipollo/release-action@v1
if: '! inputs.dry-run'
with:
artifacts: "dist/*"
generateReleaseNotes: true
draft: true
commit: ${{ github.sha }}
token: ${{ secrets.GITHUB_TOKEN }}
tag: ${{ inputs.tag }}

@@ -13,6 +13,10 @@ html/*
_build/
README.rst

# setuptools_scm
_version.py

env/
venv/
.idea/
*.iml

@@ -3,8 +3,15 @@ version: 2
sphinx:
configuration: docs/conf.py

build:
os: ubuntu-20.04
tools:
python: '3.10'

python:
version: 3.6
install:
- requirements: docs-requirements.txt
- requirements: requirements.txt
- method: pip
path: .
extra_requirements:
- ssh

Dockerfile

@@ -1,15 +1,17 @@
ARG PYTHON_VERSION=3.7
# syntax=docker/dockerfile:1

ARG PYTHON_VERSION=3.10

FROM python:${PYTHON_VERSION}

RUN mkdir /src
WORKDIR /src

COPY requirements.txt /src/requirements.txt
RUN pip install -r requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

COPY test-requirements.txt /src/test-requirements.txt
RUN pip install -r test-requirements.txt
RUN pip install --no-cache-dir -r test-requirements.txt

COPY . /src
RUN pip install .
COPY . .
ARG SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER
RUN pip install --no-cache-dir .

@@ -1,4 +1,6 @@
ARG PYTHON_VERSION=3.7
# syntax=docker/dockerfile:1

ARG PYTHON_VERSION=3.10

FROM python:${PYTHON_VERSION}

@@ -10,6 +12,6 @@ RUN addgroup --gid $gid sphinx \

WORKDIR /src
COPY requirements.txt docs-requirements.txt ./
RUN pip install -r requirements.txt -r docs-requirements.txt
RUN pip install --no-cache-dir -r requirements.txt -r docs-requirements.txt

USER sphinx

@@ -25,7 +25,7 @@ def buildImages = { ->
imageDindSSH = "${imageNameBase}:sshdind-${gitCommit()}"
withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
buildImage(imageDindSSH, "-f tests/Dockerfile-ssh-dind .", "")
buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7")
buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.10 .", "py3.10")
}
}
}

@@ -70,7 +70,7 @@ def runTests = { Map settings ->
throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '19.03.12')`")
}
if (!pythonVersion) {
throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py3.7')`")
throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py3.x')`")
}

{ ->

Makefile

@@ -1,5 +1,15 @@
TEST_API_VERSION ?= 1.41
TEST_ENGINE_VERSION ?= 20.10.05
TEST_ENGINE_VERSION ?= 20.10

ifeq ($(OS),Windows_NT)
PLATFORM := Windows
else
PLATFORM := $(shell sh -c 'uname -s 2>/dev/null || echo Unknown')
endif

ifeq ($(PLATFORM),Linux)
uid_args := "--build-arg uid=$(shell id -u) --build-arg gid=$(shell id -g)"
endif

.PHONY: all
all: test
@@ -11,15 +21,25 @@ clean:

.PHONY: build-dind-ssh
build-dind-ssh:
docker build -t docker-dind-ssh -f tests/Dockerfile-ssh-dind --build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} --build-arg API_VERSION=${TEST_API_VERSION} --build-arg APT_MIRROR .
docker build \
--pull \
-t docker-dind-ssh \
-f tests/Dockerfile-ssh-dind \
--build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} \
--build-arg API_VERSION=${TEST_API_VERSION} \
--build-arg APT_MIRROR .

.PHONY: build-py3
build-py3:
docker build -t docker-sdk-python3 -f tests/Dockerfile --build-arg APT_MIRROR .
docker build \
--pull \
-t docker-sdk-python3 \
-f tests/Dockerfile \
--build-arg APT_MIRROR .

.PHONY: build-docs
build-docs:
docker build -t docker-sdk-python-docs -f Dockerfile-docs --build-arg uid=$(shell id -u) --build-arg gid=$(shell id -g) .
docker build -t docker-sdk-python-docs -f Dockerfile-docs $(uid_args) .

.PHONY: build-dind-certs
build-dind-certs:
@@ -46,38 +66,101 @@ integration-dind: integration-dind-py3
.PHONY: integration-dind-py3
integration-dind-py3: build-py3 setup-network
docker rm -vf dpy-dind-py3 || :
docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\
docker:${TEST_ENGINE_VERSION}-dind dockerd -H tcp://0.0.0.0:2375 --experimental
docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py3:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
--network dpy-tests docker-sdk-python3 py.test tests/integration/${file}

docker run \
--detach \
--name dpy-dind-py3 \
--network dpy-tests \
--pull=always \
--privileged \
docker:${TEST_ENGINE_VERSION}-dind \
dockerd -H tcp://0.0.0.0:2375 --experimental

# Wait for Docker-in-Docker to come to life
docker run \
--network dpy-tests \
--rm \
--tty \
busybox \
sh -c 'while ! nc -z dpy-dind-py3 2375; do sleep 1; done'

docker run \
--env="DOCKER_HOST=tcp://dpy-dind-py3:2375" \
--env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \
--network dpy-tests \
--rm \
--tty \
docker-sdk-python3 \
py.test tests/integration/${file}

docker rm -vf dpy-dind-py3


.PHONY: integration-ssh-py3
integration-ssh-py3: build-dind-ssh build-py3 setup-network
docker rm -vf dpy-dind-py3 || :
docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\
.PHONY: integration-dind-ssh
integration-dind-ssh: build-dind-ssh build-py3 setup-network
docker rm -vf dpy-dind-ssh || :
docker run -d --network dpy-tests --name dpy-dind-ssh --privileged \
docker-dind-ssh dockerd --experimental
# start SSH daemon
docker exec dpy-dind-py3 sh -c "/usr/sbin/sshd"
docker run -t --rm --env="DOCKER_HOST=ssh://dpy-dind-py3" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
--network dpy-tests docker-sdk-python3 py.test tests/ssh/${file}
docker rm -vf dpy-dind-py3
# start SSH daemon for known key
docker exec dpy-dind-ssh sh -c "/usr/sbin/sshd -h /etc/ssh/known_ed25519 -p 22"
docker exec dpy-dind-ssh sh -c "/usr/sbin/sshd -h /etc/ssh/unknown_ed25519 -p 2222"
docker run \
--tty \
--rm \
--env="DOCKER_HOST=ssh://dpy-dind-ssh" \
--env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \
--env="UNKNOWN_DOCKER_SSH_HOST=ssh://dpy-dind-ssh:2222" \
--network dpy-tests \
docker-sdk-python3 py.test tests/ssh/${file}
docker rm -vf dpy-dind-ssh


.PHONY: integration-dind-ssl
integration-dind-ssl: build-dind-certs build-py3
integration-dind-ssl: build-dind-certs build-py3 setup-network
docker rm -vf dpy-dind-certs dpy-dind-ssl || :
docker run -d --name dpy-dind-certs dpy-dind-certs
docker run -d --env="DOCKER_HOST=tcp://localhost:2375" --env="DOCKER_TLS_VERIFY=1"\
--env="DOCKER_CERT_PATH=/certs" --volumes-from dpy-dind-certs --name dpy-dind-ssl\
--network dpy-tests --network-alias docker -v /tmp --privileged\
docker:${TEST_ENGINE_VERSION}-dind\
dockerd --tlsverify --tlscacert=/certs/ca.pem --tlscert=/certs/server-cert.pem\
--tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375 --experimental
docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\
--env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
--network dpy-tests docker-sdk-python3 py.test tests/integration/${file}

docker run \
--detach \
--env="DOCKER_CERT_PATH=/certs" \
--env="DOCKER_HOST=tcp://localhost:2375" \
--env="DOCKER_TLS_VERIFY=1" \
--name dpy-dind-ssl \
--network dpy-tests \
--network-alias docker \
--pull=always \
--privileged \
--volume /tmp \
--volumes-from dpy-dind-certs \
docker:${TEST_ENGINE_VERSION}-dind \
dockerd \
--tlsverify \
--tlscacert=/certs/ca.pem \
--tlscert=/certs/server-cert.pem \
--tlskey=/certs/server-key.pem \
-H tcp://0.0.0.0:2375 \
--experimental

# Wait for Docker-in-Docker to come to life
docker run \
--network dpy-tests \
--rm \
--tty \
busybox \
sh -c 'while ! nc -z dpy-dind-ssl 2375; do sleep 1; done'

docker run \
--env="DOCKER_CERT_PATH=/certs" \
--env="DOCKER_HOST=tcp://docker:2375" \
--env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \
--env="DOCKER_TLS_VERIFY=1" \
--network dpy-tests \
--rm \
--volumes-from dpy-dind-ssl \
--tty \
docker-sdk-python3 \
py.test tests/integration/${file}

docker rm -vf dpy-dind-ssl dpy-dind-certs

.PHONY: flake8

@@ -10,9 +10,8 @@ The latest stable version [is available on PyPI](https://pypi.python.org/pypi/do

pip install docker

If you are intending to connect to a docker host via TLS, add `docker[tls]` to your requirements instead, or install with pip:

pip install docker[tls]
> Older versions (< 6.0) required installing `docker[tls]` for SSL/TLS support.
> This is no longer necessary and is a no-op, but is supported for backwards compatibility.

## Usage

appveyor.yml

@@ -1,13 +0,0 @@
version: '{branch}-{build}'

install:
- "SET PATH=C:\\Python37-x64;C:\\Python37-x64\\Scripts;%PATH%"
- "python --version"
- "python -m pip install --upgrade pip"
- "pip install tox==2.9.1"

# Build the binary after tests
build: false

test_script:
- "tox"

@@ -4,7 +4,6 @@ from .client import DockerClient, from_env
from .context import Context
from .context import ContextAPI
from .tls import TLSConfig
from .version import version, version_info
from .version import __version__

__version__ = version
__title__ = 'docker'

@@ -76,6 +76,7 @@ class BuildApiMixin:
forcerm (bool): Always remove intermediate containers, even after
unsuccessful builds
dockerfile (str): path within the build context to the Dockerfile
gzip (bool): If set to ``True``, gzip compression/encoding is used
buildargs (dict): A dictionary of build arguments
container_limits (dict): A dictionary of limits applied to each
container created by the build process. Valid keys:

@@ -153,7 +154,7 @@ class BuildApiMixin:
with open(dockerignore) as f:
exclude = list(filter(
lambda x: x != '' and x[0] != '#',
[l.strip() for l in f.read().splitlines()]
[line.strip() for line in f.read().splitlines()]
))
dockerfile = process_dockerfile(dockerfile, path)
context = utils.tar(

@@ -267,7 +267,7 @@ class APIClient(
try:
response.raise_for_status()
except requests.exceptions.HTTPError as e:
raise create_api_error_from_http_exception(e)
raise create_api_error_from_http_exception(e) from e

def _result(self, response, json=False, binary=False):
assert not (json and binary)

@@ -223,7 +223,7 @@ class ContainerApiMixin:
mac_address=None, labels=None, stop_signal=None,
networking_config=None, healthcheck=None,
stop_timeout=None, runtime=None,
use_config_proxy=True):
use_config_proxy=True, platform=None):
"""
Creates a container. Parameters are similar to those for the ``docker
run`` command except it doesn't support the attach options (``-a``).

@@ -256,7 +256,9 @@ class ContainerApiMixin:

.. code-block:: python

client.api.create_host_config(port_bindings={1111: ('127.0.0.1', 4567)})
client.api.create_host_config(
port_bindings={1111: ('127.0.0.1', 4567)}
)

Or without host port assignment:

@@ -396,6 +398,7 @@ class ContainerApiMixin:
configuration file (``~/.docker/config.json`` by default)
contains a proxy configuration, the corresponding environment
variables will be set in the container being created.
platform (str): Platform in the format ``os[/arch[/variant]]``.

Returns:
A dictionary with an image 'Id' key and a 'Warnings' key.

@@ -425,16 +428,22 @@ class ContainerApiMixin:
stop_signal, networking_config, healthcheck,
stop_timeout, runtime
)
return self.create_container_from_config(config, name)
return self.create_container_from_config(config, name, platform)

def create_container_config(self, *args, **kwargs):
return ContainerConfig(self._version, *args, **kwargs)

def create_container_from_config(self, config, name=None):
def create_container_from_config(self, config, name=None, platform=None):
u = self._url("/containers/create")
params = {
'name': name
}
if platform:
if utils.version_lt(self._version, '1.41'):
raise errors.InvalidVersion(
'platform is not supported for API version < 1.41'
)
params['platform'] = platform
res = self._post_json(u, data=config, params=params)
return self._result(res, True)

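The new ``platform`` value threads through to the ``/containers/create`` query string. A minimal usage sketch (assuming a daemon with API version 1.41 or newer and an image available for the requested platform):

    import docker

    client = docker.APIClient()
    # On API versions older than 1.41 the SDK raises InvalidVersion instead.
    container = client.create_container(
        'busybox', 'true', platform='linux/arm64')
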
@@ -579,10 +588,13 @@ class ContainerApiMixin:

Example:

>>> client.api.create_host_config(privileged=True, cap_drop=['MKNOD'],
volumes_from=['nostalgic_newton'])
>>> client.api.create_host_config(
... privileged=True,
... cap_drop=['MKNOD'],
... volumes_from=['nostalgic_newton'],
... )
{'CapDrop': ['MKNOD'], 'LxcConf': None, 'Privileged': True,
'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False}
'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False}

"""
if not kwargs:
@@ -814,11 +826,12 @@ class ContainerApiMixin:
tail (str or int): Output specified number of lines at the end of
logs. Either an integer of number of lines or the string
``all``. Default ``all``
since (datetime or int): Show logs since a given datetime or
integer epoch (in seconds)
since (datetime, int, or float): Show logs since a given datetime,
integer epoch (in seconds) or float (in fractional seconds)
follow (bool): Follow log output. Default ``False``
until (datetime or int): Show logs that occurred before the given
datetime or integer epoch (in seconds)
until (datetime, int, or float): Show logs that occurred before
the given datetime, integer epoch (in seconds), or
float (in fractional seconds)

Returns:
(generator or str)

@@ -843,9 +856,11 @@ class ContainerApiMixin:
params['since'] = utils.datetime_to_timestamp(since)
elif (isinstance(since, int) and since > 0):
params['since'] = since
elif (isinstance(since, float) and since > 0.0):
params['since'] = since
else:
raise errors.InvalidArgument(
'since value should be datetime or positive int, '
'since value should be datetime or positive int/float, '
'not {}'.format(type(since))
)

|
@ -858,9 +873,11 @@ class ContainerApiMixin:
|
|||
params['until'] = utils.datetime_to_timestamp(until)
|
||||
elif (isinstance(until, int) and until > 0):
|
||||
params['until'] = until
|
||||
elif (isinstance(until, float) and until > 0.0):
|
||||
params['until'] = until
|
||||
else:
|
||||
raise errors.InvalidArgument(
|
||||
'until value should be datetime or positive int, '
|
||||
'until value should be datetime or positive int/float, '
|
||||
'not {}'.format(type(until))
|
||||
)
|
||||
|
||||
|
|
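With these branches in place, fractional-second epochs work alongside datetime and int values. A small sketch (the container name is a placeholder):

    import time
    import docker

    client = docker.APIClient()
    # floats are passed through as-is, so sub-second log windows are possible
    logs = client.logs('my-container', since=time.time() - 30.5)
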
|
@ -377,7 +377,8 @@ class ImageApiMixin:
|
|||
|
||||
Example:
|
||||
|
||||
>>> for line in client.api.pull('busybox', stream=True, decode=True):
|
||||
>>> resp = client.api.pull('busybox', stream=True, decode=True)
|
||||
... for line in resp:
|
||||
... print(json.dumps(line, indent=4))
|
||||
{
|
||||
"status": "Pulling image (latest) from busybox",
|
||||
|
@@ -456,7 +457,12 @@ class ImageApiMixin:
If the server returns an error.

Example:
>>> for line in client.api.push('yourname/app', stream=True, decode=True):
>>> resp = client.api.push(
... 'yourname/app',
... stream=True,
... decode=True,
... )
... for line in resp:
... print(line)
{'status': 'Pushing repository yourname/app (1 tags)'}
{'status': 'Pushing','progressDetail': {}, 'id': '511136ea3c5a'}

|
@ -216,7 +216,8 @@ class NetworkApiMixin:
|
|||
def connect_container_to_network(self, container, net_id,
|
||||
ipv4_address=None, ipv6_address=None,
|
||||
aliases=None, links=None,
|
||||
link_local_ips=None, driver_opt=None):
|
||||
link_local_ips=None, driver_opt=None,
|
||||
mac_address=None):
|
||||
"""
|
||||
Connect a container to a network.
|
||||
|
||||
|
@@ -235,13 +236,16 @@ class NetworkApiMixin:
network, using the IPv6 protocol. Defaults to ``None``.
link_local_ips (:py:class:`list`): A list of link-local
(IPv4/IPv6) addresses.
mac_address (str): The MAC address of this container on the
network. Defaults to ``None``.
"""
data = {
"Container": container,
"EndpointConfig": self.create_endpoint_config(
aliases=aliases, links=links, ipv4_address=ipv4_address,
ipv6_address=ipv6_address, link_local_ips=link_local_ips,
driver_opt=driver_opt
driver_opt=driver_opt,
mac_address=mac_address
),
}

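A short sketch of the new keyword (container and network names are placeholders; per-endpoint MAC addresses need API version 1.25 or newer, as enforced in the EndpointConfig change further below):

    import docker

    client = docker.APIClient()
    client.connect_container_to_network(
        'my-container', 'my-network', mac_address='02:42:ac:11:00:02')
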
|
@ -51,19 +51,20 @@ class PluginApiMixin:
|
|||
return True
|
||||
|
||||
@utils.minimum_version('1.25')
|
||||
def disable_plugin(self, name):
|
||||
def disable_plugin(self, name, force=False):
|
||||
"""
|
||||
Disable an installed plugin.
|
||||
|
||||
Args:
|
||||
name (string): The name of the plugin. The ``:latest`` tag is
|
||||
optional, and is the default if omitted.
|
||||
force (bool): To enable the force query parameter.
|
||||
|
||||
Returns:
|
||||
``True`` if successful
|
||||
"""
|
||||
url = self._url('/plugins/{0}/disable', name)
|
||||
res = self._post(url)
|
||||
res = self._post(url, params={'force': force})
|
||||
self._raise_for_status(res)
|
||||
return True
|
||||
|
||||
|
|
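Usage sketch for the new flag (the plugin name is a placeholder):

    import docker

    client = docker.APIClient()
    # sends POST /plugins/<name>/disable with the force query parameter set
    client.disable_plugin('vieux/sshfs:latest', force=True)
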
|
@ -85,7 +85,7 @@ class SwarmApiMixin:
|
|||
def init_swarm(self, advertise_addr=None, listen_addr='0.0.0.0:2377',
|
||||
force_new_cluster=False, swarm_spec=None,
|
||||
default_addr_pool=None, subnet_size=None,
|
||||
data_path_addr=None):
|
||||
data_path_addr=None, data_path_port=None):
|
||||
"""
|
||||
Initialize a new Swarm using the current connected engine as the first
|
||||
node.
|
||||
|
@ -118,6 +118,9 @@ class SwarmApiMixin:
|
|||
networks created from the default subnet pool. Default: None
|
||||
data_path_addr (string): Address or interface to use for data path
|
||||
traffic. For example, 192.168.1.1, or an interface, like eth0.
|
||||
data_path_port (int): Port number to use for data path traffic.
|
||||
Acceptable port range is 1024 to 49151. If set to ``None`` or
|
||||
0, the default port 4789 will be used. Default: None
|
||||
|
||||
Returns:
|
||||
(str): The ID of the created node.
|
||||
|
@@ -166,6 +169,14 @@ class SwarmApiMixin:
)
data['DataPathAddr'] = data_path_addr

if data_path_port is not None:
if utils.version_lt(self._version, '1.40'):
raise errors.InvalidVersion(
'Data path port is only available for '
'API version >= 1.40'
)
data['DataPathPort'] = data_path_port

response = self._post_json(url, data=data)
return self._result(response, json=True)

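Usage sketch (assuming an engine with API 1.40 or newer; the port value is arbitrary within the documented 1024 to 49151 range):

    import docker

    client = docker.APIClient()
    client.init_swarm(advertise_addr='eth0', data_path_port=7777)
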
@@ -56,15 +56,18 @@ class VolumeApiMixin:

Example:

>>> volume = client.api.create_volume(name='foobar', driver='local',
driver_opts={'foo': 'bar', 'baz': 'false'},
labels={"key": "value"})
>>> print(volume)
>>> volume = client.api.create_volume(
... name='foobar',
... driver='local',
... driver_opts={'foo': 'bar', 'baz': 'false'},
... labels={"key": "value"},
... )
... print(volume)
{u'Driver': u'local',
u'Labels': {u'key': u'value'},
u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data',
u'Name': u'foobar',
u'Scope': u'local'}
u'Labels': {u'key': u'value'},
u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data',
u'Name': u'foobar',
u'Scope': u'local'}

"""
url = self._url('/volumes/create')

|
@ -383,7 +383,6 @@ def _load_legacy_config(config_file):
|
|||
}}
|
||||
except Exception as e:
|
||||
log.debug(e)
|
||||
pass
|
||||
|
||||
log.debug("All parsing attempts failed - returning empty config")
|
||||
return {}
|
||||
|
|
|
@@ -1,5 +1,5 @@
import sys
from .version import version
from .version import __version__

DEFAULT_DOCKER_API_VERSION = '1.41'
MINIMUM_DOCKER_API_VERSION = '1.21'

@@ -28,7 +28,7 @@ INSECURE_REGISTRY_DEPRECATION_WARNING = \
IS_WINDOWS_PLATFORM = (sys.platform == 'win32')
WINDOWS_LONGPATH_PREFIX = '\\\\?\\'

DEFAULT_USER_AGENT = f"docker-sdk-python/{version}"
DEFAULT_USER_AGENT = f"docker-sdk-python/{__version__}"
DEFAULT_NUM_POOLS = 25

# The OpenSSH server default value for MaxSessions is 10 which means we can

@@ -1,11 +1,11 @@
import errno
import json
import shutil
import subprocess

from . import constants
from . import errors
from .utils import create_environment_dict
from .utils import find_executable


class Store:

@@ -15,7 +15,7 @@ class Store:
and erasing credentials using `program`.
"""
self.program = constants.PROGRAM_PREFIX + program
self.exe = find_executable(self.program)
self.exe = shutil.which(self.program)
self.environment = environment
if self.exe is None:
raise errors.InitializationError(

@@ -1,32 +1,4 @@
import distutils.spawn
import os
import sys


def find_executable(executable, path=None):
"""
As distutils.spawn.find_executable, but on Windows, look up
every extension declared in PATHEXT instead of just `.exe`
"""
if sys.platform != 'win32':
return distutils.spawn.find_executable(executable, path)

if path is None:
path = os.environ['PATH']

paths = path.split(os.pathsep)
extensions = os.environ.get('PATHEXT', '.exe').split(os.pathsep)
base, ext = os.path.splitext(executable)

if not os.path.isfile(executable):
for p in paths:
for ext in extensions:
f = os.path.join(p, base + ext)
if os.path.isfile(f):
return f
return None
else:
return executable


def create_environment_dict(overrides):

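The standard library's shutil.which covers what the removed helper reimplemented, including PATHEXT handling on Windows:

    import shutil

    # returns the full path to the executable, or None if absent
    exe = shutil.which('docker-credential-desktop')  # name is a placeholder
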
@@ -1,5 +1,14 @@
import requests

_image_not_found_explanation_fragments = frozenset(
fragment.lower() for fragment in [
'no such image',
'not found: does not exist or no pull access',
'repository does not exist',
'was found but does not match the specified platform',
]
)


class DockerException(Exception):
"""

@@ -21,14 +30,13 @@ def create_api_error_from_http_exception(e):
explanation = (response.content or '').strip()
cls = APIError
if response.status_code == 404:
if explanation and ('No such image' in str(explanation) or
'not found: does not exist or no pull access'
in str(explanation) or
'repository does not exist' in str(explanation)):
explanation_msg = (explanation or '').lower()
if any(fragment in explanation_msg
for fragment in _image_not_found_explanation_fragments):
cls = ImageNotFound
else:
cls = NotFound
raise cls(e, response=response, explanation=explanation)
raise cls(e, response=response, explanation=explanation) from e


class APIError(requests.exceptions.HTTPError, DockerException):

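The frozenset makes the 404 classification case-insensitive and easier to extend. A rough illustration of the matching logic, not library code:

    explanation = 'No Such Image: busybox:latest'
    fragments = ['no such image', 'repository does not exist']
    # mirrors the any(...) check above: lowercase once, then substring-match
    is_image_not_found = any(f in explanation.lower() for f in fragments)
    assert is_image_not_found
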
@@ -290,11 +290,12 @@ class Container(Model):
tail (str or int): Output specified number of lines at the end of
logs. Either an integer of number of lines or the string
``all``. Default ``all``
since (datetime or int): Show logs since a given datetime or
integer epoch (in seconds)
since (datetime, int, or float): Show logs since a given datetime,
integer epoch (in seconds) or float (in fractional seconds)
follow (bool): Follow log output. Default ``False``
until (datetime or int): Show logs that occurred before the given
datetime or integer epoch (in seconds)
until (datetime, int, or float): Show logs that occurred before
the given datetime, integer epoch (in seconds), or
float (in fractional seconds)

Returns:
(generator or str): Logs from the container.

@@ -553,6 +554,11 @@ class ContainerCollection(Collection):
``["SYS_ADMIN", "MKNOD"]``.
cap_drop (list of str): Drop kernel capabilities.
cgroup_parent (str): Override the default parent cgroup.
cgroupns (str): Override the default cgroup namespace mode for the
container. One of:
- ``private`` the container runs in its own private cgroup
namespace.
- ``host`` use the host system's cgroup namespace.
cpu_count (int): Number of usable CPUs (Windows only).
cpu_percent (int): Usable percentage of the available CPUs
(Windows only).

@@ -600,7 +606,28 @@ class ContainerCollection(Collection):
group_add (:py:class:`list`): List of additional group names and/or
IDs that the container process will run as.
healthcheck (dict): Specify a test to perform to check that the
container is healthy.
container is healthy. The dict takes the following keys:

- test (:py:class:`list` or str): Test to perform to determine
container health. Possible values:

- Empty list: Inherit healthcheck from parent image
- ``["NONE"]``: Disable healthcheck
- ``["CMD", args...]``: exec arguments directly.
- ``["CMD-SHELL", command]``: Run command in the system's
default shell.

If a string is provided, it will be used as a ``CMD-SHELL``
command.
- interval (int): The time to wait between checks in
nanoseconds. It should be 0 or at least 1000000 (1 ms).
- timeout (int): The time to wait before considering the check
to have hung. It should be 0 or at least 1000000 (1 ms).
- retries (int): The number of consecutive failures needed to
consider a container as unhealthy.
- start_period (int): Start period for the container to
initialize before starting health-retries countdown in
nanoseconds. It should be 0 or at least 1000000 (1 ms).
hostname (str): Optional hostname for the container.
init (bool): Run an init inside the container that forwards
signals and reaps processes

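To make the expanded docstring concrete, here is a sketch of a run() call with a healthcheck dict; note the durations are nanoseconds per the Engine API:

    import docker

    client = docker.from_env()
    container = client.containers.run(
        'nginx',
        detach=True,
        healthcheck={
            'test': ['CMD-SHELL', 'curl -f http://localhost/ || exit 1'],
            'interval': 30 * 1_000_000_000,      # 30s between checks
            'timeout': 5 * 1_000_000_000,        # 5s before a check hangs
            'retries': 3,                        # failures until unhealthy
            'start_period': 10 * 1_000_000_000,  # startup grace period
        },
    )
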
@@ -644,7 +671,7 @@ class ContainerCollection(Collection):
network_mode (str): One of:

- ``bridge`` Create a new network stack for the container on
on the bridge network.
the bridge network.
- ``none`` No networking for this container.
- ``container:<name|id>`` Reuse another container's network
stack.

@@ -761,7 +788,8 @@ class ContainerCollection(Collection):
{'/home/user1/': {'bind': '/mnt/vol2', 'mode': 'rw'},
'/var/www': {'bind': '/mnt/vol1', 'mode': 'ro'}}

Or a list of strings which each one of its elements specifies a mount volume.
Or a list of strings which each one of its elements specifies a
mount volume.

For example:

@@ -800,7 +828,7 @@ class ContainerCollection(Collection):
image = image.id
stream = kwargs.pop('stream', False)
detach = kwargs.pop('detach', False)
platform = kwargs.pop('platform', None)
platform = kwargs.get('platform', None)

if detach and remove:
if version_gte(self.client.api._version, '1.25'):

@@ -984,6 +1012,7 @@ RUN_CREATE_KWARGS = [
'mac_address',
'name',
'network_disabled',
'platform',
'stdin_open',
'stop_signal',
'stop_timeout',

@@ -1001,6 +1030,7 @@ RUN_HOST_CONFIG_KWARGS = [
'cap_add',
'cap_drop',
'cgroup_parent',
'cgroupns',
'cpu_count',
'cpu_percent',
'cpu_period',

@@ -15,7 +15,10 @@ class Image(Model):
An image on the server.
"""
def __repr__(self):
return "<{}: '{}'>".format(self.__class__.__name__, "', '".join(self.tags))
return "<{}: '{}'>".format(
self.__class__.__name__,
"', '".join(self.tags),
)

@property
def labels(self):

@@ -28,12 +31,12 @@ class Image(Model):
@property
def short_id(self):
"""
The ID of the image truncated to 10 characters, plus the ``sha256:``
The ID of the image truncated to 12 characters, plus the ``sha256:``
prefix.
"""
if self.id.startswith('sha256:'):
return self.id[:17]
return self.id[:10]
return self.id[:19]
return self.id[:12]

@property
def tags(self):
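The arithmetic behind the new truncation: 'sha256:' is seven characters, so keeping twelve hex digits means slicing nineteen characters in total, matching what the docker CLI prints:

    image_id = 'sha256:' + '0123456789ab' * 5 + 'cdef'  # 64-char digest
    assert image_id[:19] == 'sha256:0123456789ab'        # 7 + 12 characters
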
@@ -58,6 +61,24 @@ class Image(Model):
"""
return self.client.api.history(self.id)

def remove(self, force=False, noprune=False):
"""
Remove this image.

Args:
force (bool): Force removal of the image
noprune (bool): Do not delete untagged parents

Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
return self.client.api.remove_image(
self.id,
force=force,
noprune=noprune,
)

def save(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE, named=False):
"""
Get a tarball of an image. Similar to the ``docker save`` command.

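Sketch of the new convenience method, which simply forwards to the low-level remove_image call:

    import docker

    client = docker.from_env()
    image = client.images.pull('busybox')
    image.remove(force=True)  # same effect as client.api.remove_image(image.id, force=True)
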
@@ -138,10 +159,10 @@ class RegistryData(Model):
@property
def short_id(self):
"""
The ID of the image truncated to 10 characters, plus the ``sha256:``
The ID of the image truncated to 12 characters, plus the ``sha256:``
prefix.
"""
return self.id[:17]
return self.id[:19]

def pull(self, platform=None):
"""

@@ -203,10 +224,10 @@ class ImageCollection(Collection):
Build an image and return it. Similar to the ``docker build``
command. Either ``path`` or ``fileobj`` must be set.

If you have a tar file for the Docker build context (including a
Dockerfile) already, pass a readable file-like object to ``fileobj``
and also pass ``custom_context=True``. If the stream is compressed
also, set ``encoding`` to the correct value (e.g ``gzip``).
If you already have a tar file for the Docker build context (including
a Dockerfile), pass a readable file-like object to ``fileobj``
and also pass ``custom_context=True``. If the stream is also
compressed, set ``encoding`` to the correct value (e.g ``gzip``).

If you want to get the raw output of the build, use the
:py:meth:`~docker.api.build.BuildApiMixin.build` method in the

@@ -263,7 +284,7 @@ class ImageCollection(Collection):

Returns:
(tuple): The first item is the :py:class:`Image` object for the
image that was build. The second item is a generator of the
image that was built. The second item is a generator of the
build logs as JSON-decoded objects.

Raises:

@@ -44,16 +44,19 @@ class Plugin(Model):
self.client.api.configure_plugin(self.name, options)
self.reload()

def disable(self):
def disable(self, force=False):
"""
Disable the plugin.

Args:
force (bool): Force disable. Default: False

Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""

self.client.api.disable_plugin(self.name)
self.client.api.disable_plugin(self.name, force)
self.reload()

def enable(self, timeout=0):

@@ -117,7 +120,11 @@ class Plugin(Model):
if remote is None:
remote = self.name
privileges = self.client.api.plugin_privileges(remote)
yield from self.client.api.upgrade_plugin(self.name, remote, privileges)
yield from self.client.api.upgrade_plugin(
self.name,
remote,
privileges,
)
self.reload()

@@ -35,9 +35,9 @@ class Model:
@property
def short_id(self):
"""
The ID of the object, truncated to 10 characters.
The ID of the object, truncated to 12 characters.
"""
return self.id[:10]
return self.id[:12]

def reload(self):
"""

@@ -217,6 +217,8 @@ class ServiceCollection(Collection):
the default set for the container.
cap_drop (:py:class:`list`): A list of kernel capabilities to drop
from the default set for the container.
sysctls (:py:class:`dict`): A dict of sysctl values to add to the
container

Returns:
:py:class:`Service`: The created service.

@@ -305,6 +307,7 @@ CONTAINER_SPEC_KWARGS = [
'tty',
'user',
'workdir',
'sysctls',
]

# kwargs to copy straight over to TaskTemplate

@@ -320,6 +323,7 @@ CREATE_SERVICE_KWARGS = [
'labels',
'mode',
'update_config',
'rollback_config',
'endpoint_spec',
]

@@ -35,7 +35,8 @@ class Swarm(Model):

def init(self, advertise_addr=None, listen_addr='0.0.0.0:2377',
force_new_cluster=False, default_addr_pool=None,
subnet_size=None, data_path_addr=None, **kwargs):
subnet_size=None, data_path_addr=None, data_path_port=None,
**kwargs):
"""
Initialize a new swarm on this Engine.

@@ -65,6 +66,9 @@ class Swarm(Model):
networks created from the default subnet pool. Default: None
data_path_addr (string): Address or interface to use for data path
traffic. For example, 192.168.1.1, or an interface, like eth0.
data_path_port (int): Port number to use for data path traffic.
Acceptable port range is 1024 to 49151. If set to ``None`` or
0, the default port 4789 will be used. Default: None
task_history_retention_limit (int): Maximum number of tasks
history stored.
snapshot_interval (int): Number of logs entries between snapshot.

@@ -121,6 +125,7 @@ class Swarm(Model):
'default_addr_pool': default_addr_pool,
'subnet_size': subnet_size,
'data_path_addr': data_path_addr,
'data_path_port': data_path_port,
}
init_kwargs['swarm_spec'] = self.client.api.create_swarm_spec(**kwargs)
node_id = self.client.api.init_swarm(**init_kwargs)

@@ -12,8 +12,9 @@ class TLSConfig:
Args:
client_cert (tuple of str): Path to client cert, path to client key.
ca_cert (str): Path to CA cert file.
verify (bool or str): This can be ``False`` or a path to a CA cert
file.
verify (bool or str): This can be a bool or a path to a CA cert
file to verify against. If ``True``, verify using ca_cert;
if ``False`` or not specified, do not verify.
ssl_version (int): A valid `SSL version`_.
assert_hostname (bool): Verify the hostname of the server.

@@ -37,30 +38,11 @@ class TLSConfig:
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint

# TODO(dperny): according to the python docs, PROTOCOL_TLSvWhatever is
# depcreated, and it's recommended to use OPT_NO_TLSvWhatever instead
# to exclude versions. But I think that might require a bigger
# architectural change, so I've opted not to pursue it at this time

# If the user provides an SSL version, we should use their preference
if ssl_version:
self.ssl_version = ssl_version
else:
# If the user provides no ssl version, we should default to
# TLSv1_2. This option is the most secure, and will work for the
# majority of users with reasonably up-to-date software. However,
# before doing so, detect openssl version to ensure we can support
# it.
if ssl.OPENSSL_VERSION_INFO[:3] >= (1, 0, 1) and hasattr(
ssl, 'PROTOCOL_TLSv1_2'):
# If the OpenSSL version is high enough to support TLSv1_2,
# then we should use it.
self.ssl_version = getattr(ssl, 'PROTOCOL_TLSv1_2')
else:
# Otherwise, TLS v1.0 seems to be the safest default;
# SSLv23 fails in mysterious ways:
# https://github.com/docker/docker-py/issues/963
self.ssl_version = ssl.PROTOCOL_TLSv1
self.ssl_version = ssl.PROTOCOL_TLS_CLIENT

# "client_cert" must have both or neither cert/key files. In
# either case, Alert the user when both are expected, but any are

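With PROTOCOL_TLS_CLIENT as the default, Python negotiates the highest TLS version both sides support. A usage sketch (the host and cert paths are placeholders):

    import docker
    from docker.tls import TLSConfig

    tls_config = TLSConfig(
        client_cert=('/certs/cert.pem', '/certs/key.pem'),
        ca_cert='/certs/ca.pem',
        verify=True,
    )
    client = docker.DockerClient(
        base_url='tcp://docker.example.com:2376', tls=tls_config)
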
@@ -61,7 +61,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
"Pool reached maximum size and no more "
"connections are allowed."
)
pass  # Oh well, we'll create a new connection then
# Oh well, we'll create a new connection then

return conn or self._new_conn()

@@ -58,12 +58,11 @@ class SSHSocket(socket.socket):
env.pop('SSL_CERT_FILE', None)

self.proc = subprocess.Popen(
' '.join(args),
args,
env=env,
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
preexec_fn=None if constants.IS_WINDOWS_PLATFORM else preexec_func)
preexec_fn=preexec_func)

def _write(self, data):
if not self.proc or self.proc.stdin.closed:

@@ -156,7 +155,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
"Pool reached maximum size and no more "
"connections are allowed."
)
pass  # Oh well, we'll create a new connection then
# Oh well, we'll create a new connection then

return conn or self._new_conn()

@@ -204,7 +203,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
host_config = conf.lookup(base_url.hostname)
if 'proxycommand' in host_config:
self.ssh_params["sock"] = paramiko.ProxyCommand(
self.ssh_conf['proxycommand']
host_config['proxycommand']
)
if 'hostname' in host_config:
self.ssh_params['hostname'] = host_config['hostname']

@@ -216,7 +215,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
self.ssh_params['key_filename'] = host_config['identityfile']

self.ssh_client.load_system_host_keys()
self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy())
self.ssh_client.set_missing_host_key_policy(paramiko.RejectPolicy())

def _connect(self):
if self.ssh_client:

@@ -2,9 +2,7 @@
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
import sys

from distutils.version import StrictVersion
from packaging.version import Version
from requests.adapters import HTTPAdapter

from docker.transport.basehttpadapter import BaseHTTPAdapter

@@ -17,12 +15,6 @@ except ImportError:

PoolManager = urllib3.poolmanager.PoolManager

# Monkey-patching match_hostname with a version that supports
# IP-address checking. Not necessary for Python 3.5 and above
if sys.version_info[0] < 3 or sys.version_info[1] < 5:
from backports.ssl_match_hostname import match_hostname
urllib3.connection.match_hostname = match_hostname


class SSLHTTPAdapter(BaseHTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''

@@ -70,4 +62,4 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
return False
if urllib_ver == 'dev':
return True
return StrictVersion(urllib_ver) > StrictVersion('1.5')
return Version(urllib_ver) > Version('1.5')

@@ -272,7 +272,8 @@ class HostConfig(dict):
volume_driver=None, cpu_count=None, cpu_percent=None,
nano_cpus=None, cpuset_mems=None, runtime=None, mounts=None,
cpu_rt_period=None, cpu_rt_runtime=None,
device_cgroup_rules=None, device_requests=None):
device_cgroup_rules=None, device_requests=None,
cgroupns=None):

if mem_limit is not None:
self['Memory'] = parse_bytes(mem_limit)

@@ -646,6 +647,9 @@ class HostConfig(dict):
req = DeviceRequest(**req)
self['DeviceRequests'].append(req)

if cgroupns:
self['CgroupnsMode'] = cgroupns


def host_config_type_error(param, param_value, expected):
error_msg = 'Invalid type for {0} param: expected {1} but found {2}'

@@ -4,7 +4,8 @@ from ..utils import normalize_links, version_lt

class EndpointConfig(dict):
def __init__(self, version, aliases=None, links=None, ipv4_address=None,
ipv6_address=None, link_local_ips=None, driver_opt=None):
ipv6_address=None, link_local_ips=None, driver_opt=None,
mac_address=None):
if version_lt(version, '1.22'):
raise errors.InvalidVersion(
'Endpoint config is not supported for API version < 1.22'

@@ -23,6 +24,13 @@ class EndpointConfig(dict):
if ipv6_address:
ipam_config['IPv6Address'] = ipv6_address

if mac_address:
if version_lt(version, '1.25'):
raise errors.InvalidVersion(
'mac_address is not supported for API version < 1.25'
)
self['MacAddress'] = mac_address

if link_local_ips is not None:
if version_lt(version, '1.24'):
raise errors.InvalidVersion(

@@ -29,6 +29,7 @@ class TaskTemplate(dict):
force_update (int): A counter that triggers an update even if no
relevant parameters have been changed.
"""

def __init__(self, container_spec, resources=None, restart_policy=None,
placement=None, log_driver=None, networks=None,
force_update=None):

@@ -114,14 +115,17 @@ class ContainerSpec(dict):
default set for the container.
cap_drop (:py:class:`list`): A list of kernel capabilities to drop from
the default set for the container.
sysctls (:py:class:`dict`): A dict of sysctl values to add to
the container
"""

def __init__(self, image, command=None, args=None, hostname=None, env=None,
workdir=None, user=None, labels=None, mounts=None,
stop_grace_period=None, secrets=None, tty=None, groups=None,
open_stdin=None, read_only=None, stop_signal=None,
healthcheck=None, hosts=None, dns_config=None, configs=None,
privileges=None, isolation=None, init=None, cap_add=None,
cap_drop=None):
cap_drop=None, sysctls=None):
self['Image'] = image

if isinstance(command, str):

@@ -203,6 +207,12 @@ class ContainerSpec(dict):

self['CapabilityDrop'] = cap_drop

if sysctls is not None:
if not isinstance(sysctls, dict):
raise TypeError('sysctls must be a dict')

self['Sysctls'] = sysctls


class Mount(dict):
"""

@@ -231,6 +241,7 @@ class Mount(dict):
tmpfs_size (int or string): The size for the tmpfs mount in bytes.
tmpfs_mode (int): The permission mode for the tmpfs mount.
"""

def __init__(self, target, source, type='volume', read_only=False,
consistency=None, propagation=None, no_copy=False,
labels=None, driver_config=None, tmpfs_size=None,

@@ -331,6 +342,7 @@ class Resources(dict):
``{ resource_name: resource_value }``. Alternatively, a list of
of resource specifications as defined by the Engine API.
"""

def __init__(self, cpu_limit=None, mem_limit=None, cpu_reservation=None,
mem_reservation=None, generic_resources=None):
limits = {}

@@ -399,8 +411,9 @@ class UpdateConfig(dict):
an update before the failure action is invoked, specified as a
floating point number between 0 and 1. Default: 0
order (string): Specifies the order of operations when rolling out an
updated task. Either ``start_first`` or ``stop_first`` are accepted.
updated task. Either ``start-first`` or ``stop-first`` are accepted.
"""

def __init__(self, parallelism=0, delay=None, failure_action='continue',
monitor=None, max_failure_ratio=None, order=None):
self['Parallelism'] = parallelism

@@ -436,7 +449,8 @@ class UpdateConfig(dict):

class RollbackConfig(UpdateConfig):
"""
Used to specify the way containe rollbacks should be performed by a service
Used to specify the way container rollbacks should be performed by a
service

Args:
parallelism (int): Maximum number of tasks to be rolled back in one

@@ -452,7 +466,7 @@ class RollbackConfig(UpdateConfig):
a rollback before the failure action is invoked, specified as a
floating point number between 0 and 1. Default: 0
order (string): Specifies the order of operations when rolling out a
rolled back task. Either ``start_first`` or ``stop_first`` are
rolled back task. Either ``start-first`` or ``stop-first`` are
accepted.
"""
pass

@@ -511,6 +525,7 @@ class DriverConfig(dict):
name (string): Name of the driver to use.
options (dict): Driver-specific options. Default: ``None``.
"""

def __init__(self, name, options=None):
self['Name'] = name
if options:

@@ -532,6 +547,7 @@ class EndpointSpec(dict):
is ``(target_port [, protocol [, publish_mode]])``.
Ports can only be provided if the ``vip`` resolution mode is used.
"""

def __init__(self, mode=None, ports=None):
if ports:
self['Ports'] = convert_service_ports(ports)

@@ -574,37 +590,70 @@ def convert_service_ports(ports):

class ServiceMode(dict):
"""
Indicate whether a service should be deployed as a replicated or global
service, and associated parameters
Indicate whether a service or a job should be deployed as a replicated
or global service, and associated parameters

Args:
mode (string): Can be either ``replicated`` or ``global``
mode (string): Can be either ``replicated``, ``global``,
``replicated-job`` or ``global-job``
replicas (int): Number of replicas. For replicated services only.
concurrency (int): Number of concurrent jobs. For replicated job
services only.
"""
def __init__(self, mode, replicas=None):
if mode not in ('replicated', 'global'):
raise errors.InvalidArgument(
'mode must be either "replicated" or "global"'
)
if mode != 'replicated' and replicas is not None:
raise errors.InvalidArgument(
'replicas can only be used for replicated mode'
)
self[mode] = {}
if replicas is not None:
self[mode]['Replicas'] = replicas

@property
def mode(self):
if 'global' in self:
return 'global'
return 'replicated'
def __init__(self, mode, replicas=None, concurrency=None):
replicated_modes = ('replicated', 'replicated-job')
supported_modes = replicated_modes + ('global', 'global-job')

if mode not in supported_modes:
raise errors.InvalidArgument(
'mode must be either "replicated", "global", "replicated-job"'
' or "global-job"'
)

if mode not in replicated_modes:
if replicas is not None:
raise errors.InvalidArgument(
'replicas can only be used for "replicated" or'
' "replicated-job" mode'
)

if concurrency is not None:
raise errors.InvalidArgument(
'concurrency can only be used for "replicated-job" mode'
)

service_mode = self._convert_mode(mode)
self.mode = service_mode
self[service_mode] = {}

if replicas is not None:
if mode == 'replicated':
self[service_mode]['Replicas'] = replicas

if mode == 'replicated-job':
self[service_mode]['MaxConcurrent'] = concurrency or 1
self[service_mode]['TotalCompletions'] = replicas

@staticmethod
def _convert_mode(original_mode):
if original_mode == 'global-job':
return 'GlobalJob'

if original_mode == 'replicated-job':
return 'ReplicatedJob'

return original_mode

@property
def replicas(self):
if self.mode != 'replicated':
return None
return self['replicated'].get('Replicas')
if 'replicated' in self:
return self['replicated'].get('Replicas')

if 'ReplicatedJob' in self:
return self['ReplicatedJob'].get('TotalCompletions')

return None


class SecretReference(dict):
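A quick sketch of the new job modes based on the constructor above (the dict layout follows the Engine API's ServiceSpec):

    from docker.types import ServiceMode

    mode = ServiceMode('replicated-job', replicas=10, concurrency=2)
    assert mode == {'ReplicatedJob': {'MaxConcurrent': 2,
                                      'TotalCompletions': 10}}
    assert mode.replicas == 10  # falls through to TotalCompletions
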
@@ -678,6 +727,7 @@ class Placement(dict):
platforms (:py:class:`list` of tuple): A list of platforms
expressed as ``(arch, os)`` tuples
"""

def __init__(self, constraints=None, preferences=None, platforms=None,
maxreplicas=None):
if constraints is not None:

@@ -710,6 +760,7 @@ class PlacementPreference(dict):
the scheduler will try to spread tasks evenly over groups of
nodes identified by this label.
"""

def __init__(self, strategy, descriptor):
if strategy != 'spread':
raise errors.InvalidArgument(

@@ -731,6 +782,7 @@ class DNSConfig(dict):
options (:py:class:`list`): A list of internal resolver variables
to be modified (e.g., ``debug``, ``ndots:3``, etc.).
"""

def __init__(self, nameservers=None, search=None, options=None):
self['Nameservers'] = nameservers
self['Search'] = search

@@ -761,6 +813,7 @@ class Privileges(dict):
selinux_type (string): SELinux type label
selinux_level (string): SELinux level label
"""

def __init__(self, credentialspec_file=None, credentialspec_registry=None,
selinux_disable=None, selinux_user=None, selinux_role=None,
selinux_type=None, selinux_level=None):

@@ -803,6 +856,7 @@ class NetworkAttachmentConfig(dict):
options (:py:class:`dict`): Driver attachment options for the
network target.
"""

def __init__(self, target, aliases=None, options=None):
self['Target'] = target
self['Aliases'] = aliases

@@ -224,6 +224,9 @@ class Pattern:
@classmethod
def normalize(cls, p):

# Remove trailing spaces
p = p.strip()

# Leading and trailing slashes are not relevant. Yes,
# "foo.py/" must exclude the "foo.py" regular file. "."
# components are not relevant either, even if the whole

@@ -1,20 +1,27 @@
import base64
import collections
import json
import os
import os.path
import shlex
import string
from datetime import datetime
from distutils.version import StrictVersion
from packaging.version import Version

from .. import errors
from .. import tls
from ..constants import DEFAULT_HTTP_HOST
from ..constants import DEFAULT_UNIX_SOCKET
from ..constants import DEFAULT_NPIPE
from ..constants import BYTE_UNITS
from ..tls import TLSConfig

from urllib.parse import splitnport, urlparse
from urllib.parse import urlparse, urlunparse


URLComponents = collections.namedtuple(
'URLComponents',
'scheme netloc url params query fragment',
)


def create_ipam_pool(*args, **kwargs):

@@ -49,8 +56,8 @@ def compare_version(v1, v2):
    >>> compare_version(v2, v2)
    0
    """
    s1 = StrictVersion(v1)
    s2 = StrictVersion(v2)
    s1 = Version(v1)
    s2 = Version(v2)
    if s1 == s2:
        return 0
    elif s1 > s2:
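The replacement keeps the comparison semantics `compare_version` relies on while dropping the deprecated `distutils` module; a minimal check:

from packaging.version import Version

# Components compare numerically, so '1.41' sorts above '1.9'
# (a plain string comparison would get this wrong).
assert Version('1.41') > Version('1.9')
assert Version('1.25') == Version('1.25')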
@@ -201,10 +208,6 @@ def parse_repository_tag(repo_name):


def parse_host(addr, is_win32=False, tls=False):
    path = ''
    port = None
    host = None

    # Sensible defaults
    if not addr and is_win32:
        return DEFAULT_NPIPE
@@ -263,20 +266,20 @@ def parse_host(addr, is_win32=False, tls=False):
        # to be valid and equivalent to unix:///path
        path = '/'.join((parsed_url.hostname, path))

    netloc = parsed_url.netloc
    if proto in ('tcp', 'ssh'):
        # parsed_url.hostname strips brackets from IPv6 addresses,
        # which can be problematic hence our use of splitnport() instead.
        host, port = splitnport(parsed_url.netloc)
        if port is None or port < 0:
        port = parsed_url.port or 0
        if port <= 0:
            if proto != 'ssh':
                raise errors.DockerException(
                    'Invalid bind address format: port is required:'
                    ' {}'.format(addr)
                )
            port = 22
            netloc = f'{parsed_url.netloc}:{port}'

    if not host:
        host = DEFAULT_HTTP_HOST
    if not parsed_url.hostname:
        netloc = f'{DEFAULT_HTTP_HOST}:{port}'

    # Rewrite schemes to fit library internals (requests adapters)
    if proto == 'tcp':
@@ -286,7 +289,15 @@ def parse_host(addr, is_win32=False, tls=False):

    if proto in ('http+unix', 'npipe'):
        return f"{proto}://{path}".rstrip('/')
    return f'{proto}://{host}:{port}{path}'.rstrip('/')

    return urlunparse(URLComponents(
        scheme=proto,
        netloc=netloc,
        url=path,
        params='',
        query='',
        fragment='',
    )).rstrip('/')
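The reworked logic is easiest to see from its outputs; a sketch of the intended behaviour (no daemon required):

from docker.utils import parse_host

# ssh:// addresses without an explicit port now default to 22 ...
assert parse_host('ssh://user@myhost') == 'ssh://user@myhost:22'
# ... and IPv6 hosts keep their brackets, because the netloc is reused
# verbatim instead of being rebuilt from a bracket-stripped hostname.
assert parse_host('tcp://[::1]:2375') == 'http://[::1]:2375'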


def parse_devices(devices):
@@ -351,7 +362,7 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None):
        # so if it's not set already then set it to false.
        assert_hostname = False

    params['tls'] = tls.TLSConfig(
    params['tls'] = TLSConfig(
        client_cert=(os.path.join(cert_path, 'cert.pem'),
                     os.path.join(cert_path, 'key.pem')),
        ca_cert=os.path.join(cert_path, 'ca.pem'),
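Equivalently, what `kwargs_from_env` assembles here can be built by hand with the directly-imported class; the certificate directory below is an illustrative stand-in for a real `DOCKER_CERT_PATH` value:

import os.path

from docker.tls import TLSConfig

cert_path = '/home/user/.docker'  # assumption: your DOCKER_CERT_PATH
tls_config = TLSConfig(
    client_cert=(os.path.join(cert_path, 'cert.pem'),
                 os.path.join(cert_path, 'key.pem')),
    ca_cert=os.path.join(cert_path, 'ca.pem'),
)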
@@ -1,2 +1,14 @@
version = "5.1.0-dev"
version_info = tuple(int(d) for d in version.split("-")[0].split("."))
try:
    from ._version import __version__
except ImportError:
    try:
        # importlib.metadata available in Python 3.8+, the fallback (0.0.0)
        # is fine because release builds use _version (above) rather than
        # this code path, so it only impacts developing w/ 3.7
        from importlib.metadata import version, PackageNotFoundError
        try:
            __version__ = version('docker')
        except PackageNotFoundError:
            __version__ = '0.0.0'
    except ImportError:
        __version__ = '0.0.0'
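The dev-mode fallback in the middle branch amounts to the following, which can be tried in isolation (Python 3.8+ for `importlib.metadata`):

from importlib.metadata import PackageNotFoundError, version

try:
    resolved = version('docker')
except PackageNotFoundError:
    # No installed metadata, e.g. running from a bare source checkout.
    resolved = '0.0.0'
print(resolved)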
@@ -1,2 +1,2 @@
recommonmark==0.4.0
Sphinx==1.4.6
myst-parser==0.18.0
Sphinx==5.1.1
@@ -1,3 +1,12 @@
dl.hide-signature > dt {
    display: none;
}

dl.field-list > dt {
    /* prevent code blocks from forcing wrapping on the "Parameters" header */
    word-break: initial;
}

code.literal {
    hyphens: none;
}
@@ -1,6 +1,44 @@
Change log
Changelog
==========

6.0.0
-----

### Upgrade Notes
- Minimum supported Python version is 3.7+
- When installing with pip, the `docker[tls]` extra is deprecated and a no-op,
  use `docker` for same functionality (TLS support is always available now)
- Native Python SSH client (used by default / `use_ssh_client=False`) will now
  reject unknown host keys with `paramiko.ssh_exception.SSHException`
- Short IDs are now 12 characters instead of 10 characters (same as Docker CLI)

### Features
- Python 3.10 support
- Automatically negotiate most secure TLS version
- Add `platform` (e.g. `linux/amd64`, `darwin/arm64`) to container create & run
- Add support for `GlobalJob` and `ReplicatedJobs` for Swarm
- Add `remove()` method on `Image`
- Add `force` param to `disable()` on `Plugin`

### Bugfixes
- Fix install issues on Windows related to `pywin32`
- Do not accept unknown SSH host keys in native Python SSH mode
- Use 12 character short IDs for consistency with Docker CLI
- Ignore trailing whitespace in `.dockerignore` files
- Fix IPv6 host parsing when explicit port specified
- Fix `ProxyCommand` option for SSH connections
- Do not spawn extra subshell when launching external SSH client
- Improve exception semantics to preserve context
- Documentation improvements (formatting, examples, typos, missing params)

### Miscellaneous
- Upgrade dependencies in `requirements.txt` to latest versions
- Remove extraneous transitive dependencies
- Eliminate usages of deprecated functions/methods
- Test suite reliability improvements
- GitHub Actions workflows for linting, unit tests, integration tests, and
  publishing releases

5.0.3
-----
docs/conf.py
@@ -33,24 +33,19 @@ sys.path.insert(0, os.path.abspath('..'))
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.napoleon',
    'myst_parser'
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']


from recommonmark.parser import CommonMarkParser

source_parsers = {
    '.md': CommonMarkParser,
source_suffix = {
    '.rst': 'restructuredtext',
    '.txt': 'markdown',
    '.md': 'markdown',
}

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# source_suffix = '.md'

# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
@@ -68,19 +63,18 @@ author = 'Docker Inc'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
with open('../docker/version.py') as vfile:
    exec(vfile.read())
# The full version, including alpha/beta/rc tags.
release = version
# The short X.Y version.
version = f'{version_info[0]}.{version_info[1]}'
# see https://github.com/pypa/setuptools_scm#usage-from-sphinx
from importlib.metadata import version
release = version('docker')
# for example take major/minor
version = '.'.join(release.split('.')[:2])

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
language = 'en'

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
@@ -15,7 +15,7 @@ For example, to check the server against a specific CA certificate:

.. code-block:: python

   tls_config = docker.tls.TLSConfig(ca_cert='/path/to/ca.pem')
   tls_config = docker.tls.TLSConfig(ca_cert='/path/to/ca.pem', verify=True)
   client = docker.DockerClient(base_url='<https_url>', tls=tls_config)

This is the equivalent of ``docker --tlsverify --tlscacert /path/to/ca.pem ...``.
@@ -0,0 +1,5 @@
[build-system]
requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"]

[tool.setuptools_scm]
write_to = 'docker/_version.py'
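At build time setuptools_scm materializes the module referenced above. Roughly speaking (the exact layout varies by setuptools_scm version, so treat this purely as an illustration), the generated `docker/_version.py` contains something like:

# file generated by setuptools_scm -- do not track in version control
__version__ = version = '6.0.0'
__version_tuple__ = version_tuple = (6, 0, 0)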
@@ -1,17 +1,6 @@
appdirs==1.4.3
asn1crypto==0.22.0
backports.ssl-match-hostname==3.5.0.1
cffi==1.14.4
cryptography==3.4.7
enum34==1.1.6
idna==2.5
ipaddress==1.0.18
packaging==16.8
paramiko==2.8.0
pycparser==2.17
pyOpenSSL==18.0.0
pyparsing==2.2.0
pywin32==301; sys_platform == 'win32'
requests==2.26.0
urllib3==1.26.5
websocket-client==0.56.0
packaging==21.3
paramiko==2.11.0
pywin32==304; sys_platform == 'win32'
requests==2.28.1
urllib3==1.26.11
websocket-client==1.3.3
setup.py
@@ -10,32 +10,25 @@ ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)

requirements = [
    'packaging >= 14.0',
    'requests >= 2.26.0',
    'urllib3 >= 1.26.0',
    'websocket-client >= 0.32.0',
    'requests >= 2.14.2, != 2.18.0',
]

extras_require = {
    # win32 APIs if on Windows (required for npipe support)
    ':sys_platform == "win32"': 'pywin32==227',
    ':sys_platform == "win32"': 'pywin32>=304',

    # If using docker-py over TLS, highly recommend this option is
    # pip-installed or pinned.

    # TODO: if pip installing both "requests" and "requests[security]", the
    # extra package from the "security" option are not installed (see
    # https://github.com/pypa/pip/issues/4391). Once that's fixed, instead of
    # installing the extra dependencies, install the following instead:
    # 'requests[security] >= 2.5.2, != 2.11.0, != 2.12.2'
    'tls': ['pyOpenSSL>=17.5.0', 'cryptography>=3.4.7', 'idna>=2.0.0'],
    # This is now a no-op, as similarly the requests[security] extra is
    # a no-op as of requests 2.26.0, this is always available/by default now
    # see https://github.com/psf/requests/pull/5867
    'tls': [],

    # Only required when connecting using the ssh:// protocol
    'ssh': ['paramiko>=2.4.3'],

}

version = None
exec(open('docker/version.py').read())

with open('./test-requirements.txt') as test_reqs_txt:
    test_requirements = [line for line in test_reqs_txt]
@@ -46,7 +39,9 @@ with codecs.open('./README.md', encoding='utf-8') as readme_md:

setup(
    name="docker",
    version=version,
    use_scm_version={
        'write_to': 'docker/_version.py'
    },
    description="A Python library for the Docker Engine API.",
    long_description=long_description,
    long_description_content_type='text/markdown',
@@ -58,10 +53,11 @@ setup(
        'Tracker': 'https://github.com/docker/docker-py/issues',
    },
    packages=find_packages(exclude=["tests.*", "tests"]),
    setup_requires=['setuptools_scm'],
    install_requires=requirements,
    tests_require=test_requirements,
    extras_require=extras_require,
    python_requires='>=3.6',
    python_requires='>=3.7',
    zip_safe=False,
    test_suite='tests',
    classifiers=[
@@ -71,7 +67,6 @@ setup(
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
@@ -1,6 +1,6 @@
setuptools==58.2.0
coverage==6.0.1
setuptools==63.2.0
coverage==6.4.2
flake8==4.0.1
pytest==6.2.5
pytest==7.1.2
pytest-cov==3.0.0
pytest-timeout==2.0.1
pytest-timeout==2.1.0
@@ -1,4 +1,6 @@
ARG PYTHON_VERSION=3.7
# syntax=docker/dockerfile:1

ARG PYTHON_VERSION=3.10

FROM python:${PYTHON_VERSION}

@@ -11,7 +13,9 @@ RUN apt-get update && apt-get -y install --no-install-recommends \
    pass

# Add SSH keys and set permissions
COPY tests/ssh-keys /root/.ssh
COPY tests/ssh/config/client /root/.ssh
COPY tests/ssh/config/server/known_ed25519.pub /root/.ssh/known_hosts
RUN sed -i '1s;^;dpy-dind-ssh ;' /root/.ssh/known_hosts
RUN chmod -R 600 /root/.ssh

COPY ./tests/gpg-keys /gpg-keys
@@ -27,11 +31,16 @@ RUN curl -sSL -o /opt/docker-credential-pass.tar.gz \
    chmod +x /usr/local/bin/docker-credential-pass

WORKDIR /src

COPY requirements.txt /src/requirements.txt
RUN pip install -r requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install -r requirements.txt

COPY test-requirements.txt /src/test-requirements.txt
RUN pip install -r test-requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install -r test-requirements.txt

COPY . /src
RUN pip install .
ARG SETUPTOOLS_SCM_PRETEND_VERSION=99.0.0+docker
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install -e .
@@ -1,4 +1,6 @@
ARG PYTHON_VERSION=3.6
# syntax=docker/dockerfile:1

ARG PYTHON_VERSION=3.10

FROM python:${PYTHON_VERSION}
RUN mkdir /tmp/certs
@@ -1,23 +1,20 @@
ARG API_VERSION=1.39
ARG ENGINE_VERSION=19.03.12
# syntax=docker/dockerfile:1

ARG API_VERSION=1.41
ARG ENGINE_VERSION=20.10

FROM docker:${ENGINE_VERSION}-dind

RUN apk add --no-cache \
    openssh
RUN apk add --no-cache --upgrade \
    openssh

# Add the keys and set permissions
RUN ssh-keygen -A

# copy the test SSH config
RUN echo "IgnoreUserKnownHosts yes" > /etc/ssh/sshd_config && \
    echo "PubkeyAuthentication yes" >> /etc/ssh/sshd_config && \
    echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
COPY tests/ssh/config/server /etc/ssh/

# set authorized keys for client paswordless connection
COPY tests/ssh-keys/authorized_keys /root/.ssh/authorized_keys
RUN chmod 600 /root/.ssh/authorized_keys
COPY tests/ssh/config/client/id_rsa.pub /root/.ssh/authorized_keys

RUN echo "root:root" | chpasswd
RUN ln -s /usr/local/bin/docker /usr/bin/docker
# RUN echo "root:root" | chpasswd
RUN chmod -R 600 /etc/ssh \
    && chmod -R 600 /root/.ssh \
    && ln -s /usr/local/bin/docker /usr/bin/docker
EXPOSE 22
@@ -143,4 +143,4 @@ def ctrl_with(char):
    if re.match('[a-z]', char):
        return chr(ord(char) - ord('a') + 1).encode('ascii')
    else:
        raise(Exception('char must be [a-z]'))
        raise Exception('char must be [a-z]')
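The simplified `raise` keeps behaviour identical; the helper still maps a letter to its control-key byte:

# Control characters are offset from 'a': Ctrl-A is 0x01, Ctrl-C is 0x03.
assert ctrl_with('a') == b'\x01'
assert ctrl_with('c') == b'\x03'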
@@ -100,7 +100,9 @@ class BuildTest(BaseAPIIntegrationTest):
            'ignored',
            'Dockerfile',
            '.dockerignore',
            ' ignored-with-spaces ',  # check that spaces are trimmed
            '!ignored/subdir/excepted-file',
            '! ignored/subdir/excepted-with-spaces '
            '',  # empty line,
            '#*',  # comment line
        ]))
@@ -111,6 +113,9 @@ class BuildTest(BaseAPIIntegrationTest):
        with open(os.path.join(base_dir, '#file.txt'), 'w') as f:
            f.write('this file should not be ignored')

        with open(os.path.join(base_dir, 'ignored-with-spaces'), 'w') as f:
            f.write("this file should be ignored")

        subdir = os.path.join(base_dir, 'ignored', 'subdir')
        os.makedirs(subdir)
        with open(os.path.join(subdir, 'file'), 'w') as f:
@@ -119,6 +124,9 @@ class BuildTest(BaseAPIIntegrationTest):
        with open(os.path.join(subdir, 'excepted-file'), 'w') as f:
            f.write("this file should not be ignored")

        with open(os.path.join(subdir, 'excepted-with-spaces'), 'w') as f:
            f.write("this file should not be ignored")

        tag = 'docker-py-test-build-with-dockerignore'
        stream = self.client.build(
            path=base_dir,
@@ -136,6 +144,7 @@ class BuildTest(BaseAPIIntegrationTest):

        assert sorted(list(filter(None, logs.split('\n')))) == sorted([
            '/test/#file.txt',
            '/test/ignored/subdir/excepted-with-spaces',
            '/test/ignored/subdir/excepted-file',
            '/test/not-ignored'
        ])
@@ -73,7 +73,7 @@ class ConfigAPITest(BaseAPIIntegrationTest):
    def test_create_config_with_templating(self):
        config_id = self.client.create_config(
            'favorite_character', 'sakuya izayoi',
            templating={ 'name': 'golang'}
            templating={'name': 'golang'}
        )
        self.tmp_configs.append(config_id)
        assert 'ID' in config_id
@@ -215,6 +215,20 @@ class CreateContainerTest(BaseAPIIntegrationTest):

        self.client.kill(id)

    @requires_api_version('1.41')
    def test_create_with_cgroupns(self):
        host_config = self.client.create_host_config(cgroupns='private')

        container = self.client.create_container(
            image=TEST_IMG,
            command=['sleep', '60'],
            host_config=host_config,
        )
        self.tmp_containers.append(container)

        res = self.client.inspect_container(container)
        assert 'private' == res['HostConfig']['CgroupnsMode']

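The same option is exposed through the high-level client; a sketch against a reachable daemon with API 1.41+ (the image name is just an example):

import docker

client = docker.from_env()
# cgroupns is forwarded into HostConfig.CgroupnsMode ('private' or 'host').
container = client.containers.run(
    'alpine', 'sleep 60', cgroupns='private', detach=True)
print(container.attrs['HostConfig']['CgroupnsMode'])  # 'private'
container.remove(force=True)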
    def test_group_id_ints(self):
        container = self.client.create_container(
            TEST_IMG, 'id -G',
@@ -460,16 +474,13 @@ class CreateContainerTest(BaseAPIIntegrationTest):
    def test_create_with_device_cgroup_rules(self):
        rule = 'c 7:128 rwm'
        ctnr = self.client.create_container(
            TEST_IMG, 'cat /sys/fs/cgroup/devices/devices.list',
            host_config=self.client.create_host_config(
            TEST_IMG, 'true', host_config=self.client.create_host_config(
                device_cgroup_rules=[rule]
            )
        )
        self.tmp_containers.append(ctnr)
        config = self.client.inspect_container(ctnr)
        assert config['HostConfig']['DeviceCgroupRules'] == [rule]
        self.client.start(ctnr)
        assert rule in self.client.logs(ctnr).decode('utf-8')

    def test_create_with_uts_mode(self):
        container = self.client.create_container(
@@ -1200,7 +1211,7 @@ class AttachContainerTest(BaseAPIIntegrationTest):
        sock = self.client.attach_socket(container, ws=False)
        assert sock.fileno() > -1

    def test_run_container_reading_socket(self):
    def test_run_container_reading_socket_http(self):
        line = 'hi there and stuff and things, words!'
        # `echo` appends CRLF, `printf` doesn't
        command = f"printf '{line}'"
@@ -1220,12 +1231,33 @@ class AttachContainerTest(BaseAPIIntegrationTest):
        data = read_exactly(pty_stdout, next_size)
        assert data.decode('utf-8') == line

    @pytest.mark.xfail(condition=bool(os.environ.get('DOCKER_CERT_PATH', '')),
                       reason='DOCKER_CERT_PATH not respected for websockets')
    def test_run_container_reading_socket_ws(self):
        line = 'hi there and stuff and things, words!'
        # `echo` appends CRLF, `printf` doesn't
        command = f"printf '{line}'"
        container = self.client.create_container(TEST_IMG, command,
                                                 detach=True, tty=False)
        self.tmp_containers.append(container)

        opts = {"stdout": 1, "stream": 1, "logs": 1}
        pty_stdout = self.client.attach_socket(container, opts, ws=True)
        self.addCleanup(pty_stdout.close)

        self.client.start(container)

        data = pty_stdout.recv()
        assert data.decode('utf-8') == line

    @pytest.mark.timeout(10)
    def test_attach_no_stream(self):
        container = self.client.create_container(
            TEST_IMG, 'echo hello'
        )
        self.tmp_containers.append(container)
        self.client.start(container)
        self.client.wait(container, condition='not-running')
        output = self.client.attach(container, stream=False, logs=True)
        assert output == 'hello\n'.encode(encoding='ascii')

@@ -281,7 +281,7 @@ class ImportImageTest(BaseAPIIntegrationTest):

        server = socketserver.TCPServer(('', 0), Handler)
        thread = threading.Thread(target=server.serve_forever)
        thread.setDaemon(True)
        thread.daemon = True
        thread.start()

        yield f'http://{socket.gethostname()}:{server.server_address[1]}'
@@ -408,6 +408,22 @@ class TestNetworks(BaseAPIIntegrationTest):
        net_data = container_data['NetworkSettings']['Networks'][net_name]
        assert net_data['IPAMConfig']['IPv6Address'] == '2001:389::f00d'

    @requires_api_version('1.25')
    def test_connect_with_mac_address(self):
        net_name, net_id = self.create_network()

        container = self.client.create_container(TEST_IMG, 'top')
        self.tmp_containers.append(container)

        self.client.connect_container_to_network(
            container, net_name, mac_address='02:42:ac:11:00:02'
        )

        container_data = self.client.inspect_container(container)

        net_data = container_data['NetworkSettings']['Networks'][net_name]
        assert net_data['MacAddress'] == '02:42:ac:11:00:02'

    @requires_api_version('1.23')
    def test_create_internal_networks(self):
        _, net_id = self.create_network(internal=True)
@@ -22,13 +22,13 @@ class PluginTest(BaseAPIIntegrationTest):
    def teardown_method(self, method):
        client = self.get_client_instance()
        try:
            client.disable_plugin(SSHFS)
            client.disable_plugin(SSHFS, True)
        except docker.errors.APIError:
            pass

        for p in self.tmp_plugins:
            try:
                client.remove_plugin(p, force=True)
                client.remove_plugin(p)
            except docker.errors.APIError:
                pass

@@ -626,6 +626,39 @@ class ServiceTest(BaseAPIIntegrationTest):
        assert 'Replicated' in svc_info['Spec']['Mode']
        assert svc_info['Spec']['Mode']['Replicated'] == {'Replicas': 5}

    @requires_api_version('1.41')
    def test_create_service_global_job_mode(self):
        container_spec = docker.types.ContainerSpec(
            TEST_IMG, ['echo', 'hello']
        )
        task_tmpl = docker.types.TaskTemplate(container_spec)
        name = self.get_service_name()
        svc_id = self.client.create_service(
            task_tmpl, name=name, mode='global-job'
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'Mode' in svc_info['Spec']
        assert 'GlobalJob' in svc_info['Spec']['Mode']

    @requires_api_version('1.41')
    def test_create_service_replicated_job_mode(self):
        container_spec = docker.types.ContainerSpec(
            TEST_IMG, ['echo', 'hello']
        )
        task_tmpl = docker.types.TaskTemplate(container_spec)
        name = self.get_service_name()
        svc_id = self.client.create_service(
            task_tmpl, name=name,
            mode=docker.types.ServiceMode('replicated-job', 5)
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'Mode' in svc_info['Spec']
        assert 'ReplicatedJob' in svc_info['Spec']['Mode']
        assert svc_info['Spec']['Mode']['ReplicatedJob'] == {
            'MaxConcurrent': 1,
            'TotalCompletions': 5
        }

    @requires_api_version('1.25')
    def test_update_service_force_update(self):
        container_spec = docker.types.ContainerSpec(
@@ -1386,3 +1419,23 @@ class ServiceTest(BaseAPIIntegrationTest):
        assert services[0]['ID'] == svc_id['ID']
        spec = services[0]['Spec']['TaskTemplate']['ContainerSpec']
        assert 'CAP_SYSLOG' in spec['CapabilityDrop']

    @requires_api_version('1.40')
    def test_create_service_with_sysctl(self):
        name = self.get_service_name()
        sysctls = {
            'net.core.somaxconn': '1024',
            'net.ipv4.tcp_syncookies': '0',
        }
        container_spec = docker.types.ContainerSpec(
            TEST_IMG, ['echo', 'hello'], sysctls=sysctls
        )
        task_tmpl = docker.types.TaskTemplate(container_spec)
        svc_id = self.client.create_service(task_tmpl, name=name)
        assert self.client.inspect_service(svc_id)
        services = self.client.services(filters={'name': name})
        assert len(services) == 1
        assert services[0]['ID'] == svc_id['ID']
        spec = services[0]['Spec']['TaskTemplate']['ContainerSpec']
        assert spec['Sysctls']['net.core.somaxconn'] == '1024'
        assert spec['Sysctls']['net.ipv4.tcp_syncookies'] == '0'
@@ -253,3 +253,8 @@ class SwarmTest(BaseAPIIntegrationTest):
    @pytest.mark.xfail(reason='Can fail if eth0 has multiple IP addresses')
    def test_init_swarm_data_path_addr(self):
        assert self.init_swarm(data_path_addr='eth0')

    @requires_api_version('1.40')
    def test_init_swarm_data_path_port(self):
        assert self.init_swarm(data_path_port=4242)
        assert self.client.inspect_swarm()['DataPathPort'] == 4242
@@ -1,9 +1,9 @@
import os
import random
import shutil
import sys

import pytest
from distutils.spawn import find_executable

from docker.credentials import (
    CredentialsNotFound, Store, StoreError, DEFAULT_LINUX_STORE,
@@ -22,9 +22,9 @@ class TestStore:
    def setup_method(self):
        self.tmp_keys = []
        if sys.platform.startswith('linux'):
            if find_executable('docker-credential-' + DEFAULT_LINUX_STORE):
            if shutil.which('docker-credential-' + DEFAULT_LINUX_STORE):
                self.store = Store(DEFAULT_LINUX_STORE)
            elif find_executable('docker-credential-pass'):
            elif shutil.which('docker-credential-pass'):
                self.store = Store('pass')
            else:
                raise Exception('No supported docker-credential store in PATH')
@@ -30,13 +30,18 @@ class ServiceTest(unittest.TestCase):
            # ContainerSpec arguments
            image="alpine",
            command="sleep 300",
            container_labels={'container': 'label'}
            container_labels={'container': 'label'},
            rollback_config={'order': 'start-first'}
        )
        assert service.name == name
        assert service.attrs['Spec']['Labels']['foo'] == 'bar'
        container_spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec']
        assert "alpine" in container_spec['Image']
        assert container_spec['Labels'] == {'container': 'label'}
        spec_rollback = service.attrs['Spec'].get('RollbackConfig', None)
        assert spec_rollback is not None
        assert ('Order' in spec_rollback and
                spec_rollback['Order'] == 'start-first')

    def test_create_with_network(self):
        client = docker.from_env(version=TEST_API_VERSION)
@@ -8,6 +8,7 @@ import pytest


class TestRegressions(BaseAPIIntegrationTest):
    @pytest.mark.xfail(True, reason='Docker API always returns chunked resp')
    def test_443_handle_nonchunked_response_in_stream(self):
        dfile = io.BytesIO()
        with pytest.raises(docker.errors.APIError) as exc:
@@ -1 +0,0 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/BiXkbL9oEbE3PJv1S2p12XK5BHW3qQT5Rf+CYG0ATYyMPIVM6+IXVyf3QNxpnvPXvbPBQJCs0qHeuPwZy2Gsbt35QnmlgrczFPiXXosCD2N+wrcOQPZGuLjQyUUP2yJRVSTLpp8zk2F8w3laGIB3Jk1hUcMUExemKxQYk/L40b5rXKkarLk5awBuicjRStMrchPRHZ2n715TG+zSvf8tB/UHRXKYPqai/Je5eiH3yGUzCY4zn+uEoqAFb4V8lpIj8Rw3EXmCYVwG0vg+44QIQ2gJnIhTlcmxwkynvZn97nug4NLlGJQ+sDCnIvMapycHfGkNlBz3fFtu/ORsxPpZbTNg/9noa3Zf8OpIwvE/FHNPqDctGltwxEgQxj5fE34x0fYnF08tejAUJJCZE3YsGgNabsS4pD+kRhI83eFZvgj3Q1AeTK0V9bRM7jujcc9Rz+V9Gb5zYEHN/l8PxEVlj0OlURf9ZlknNQK8xRh597jDXTfVQKCMO/nRaWH2bq0=
@@ -1,3 +0,0 @@
Host *
    StrictHostKeyChecking no
    UserKnownHostsFile=/dev/null
@@ -2,6 +2,8 @@ import os
import shutil
import unittest

import pytest

import docker
from .. import helpers
from docker.utils import kwargs_from_env
@@ -68,6 +70,8 @@ class BaseIntegrationTest(unittest.TestCase):
        client.close()


@pytest.mark.skipif(not os.environ.get('DOCKER_HOST', '').startswith('ssh://'),
                    reason='DOCKER_HOST is not an SSH target')
class BaseAPIIntegrationTest(BaseIntegrationTest):
    """
    A test case for `APIClient` integration tests. It sets up an `APIClient`
@@ -0,0 +1,7 @@
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
QyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4QAAAJgIMffcCDH3
3AAAAAtzc2gtZWQyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4Q
AAAEDeXnt5AuNk4oTHjMU1vUsEwh64fuEPu4hXsG6wCVt/6Iax81dU/Xw3tcLohAa67FdB
FtPGU8YuP7n8IHKP16DhAAAAEXJvb3RAMGRkZmQyMWRkYjM3AQIDBA==
-----END OPENSSH PRIVATE KEY-----
@@ -0,0 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIax81dU/Xw3tcLohAa67FdBFtPGU8YuP7n8IHKP16Dh docker-py integration tests known
@@ -0,0 +1,3 @@
IgnoreUserKnownHosts yes
PubkeyAuthentication yes
PermitRootLogin yes
@@ -0,0 +1,7 @@
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
QyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4QAAAJgIMffcCDH3
3AAAAAtzc2gtZWQyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4Q
AAAEDeXnt5AuNk4oTHjMU1vUsEwh64fuEPu4hXsG6wCVt/6Iax81dU/Xw3tcLohAa67FdB
FtPGU8YuP7n8IHKP16DhAAAAEXJvb3RAMGRkZmQyMWRkYjM3AQIDBA==
-----END OPENSSH PRIVATE KEY-----
@@ -0,0 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIax81dU/Xw3tcLohAa67FdBFtPGU8YuP7n8IHKP16Dh docker-py integration tests unknown
@@ -0,0 +1,22 @@
import os
import unittest

import docker
import paramiko.ssh_exception
import pytest
from .base import TEST_API_VERSION


class SSHConnectionTest(unittest.TestCase):
    @pytest.mark.skipif('UNKNOWN_DOCKER_SSH_HOST' not in os.environ,
                        reason='Unknown Docker SSH host not configured')
    def test_ssh_unknown_host(self):
        with self.assertRaises(paramiko.ssh_exception.SSHException) as cm:
            docker.APIClient(
                version=TEST_API_VERSION,
                timeout=60,
                # test only valid with Paramiko
                use_ssh_client=False,
                base_url=os.environ['UNKNOWN_DOCKER_SSH_HOST'],
            )
        self.assertIn('not found in known_hosts', str(cm.exception))
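Because the native Paramiko transport now refuses unknown host keys, environments that relied on the old permissive behaviour need the key on record before connecting. One hedged workaround, assuming the standard `ssh-keyscan` tool is available, is a small helper like this (hypothetical, not part of the library):

import os
import subprocess

def trust_host(host, known_hosts='~/.ssh/known_hosts'):
    # Append the remote host's public keys so docker.APIClient with
    # use_ssh_client=False (Paramiko) can verify them.
    keys = subprocess.check_output(['ssh-keyscan', host], text=True)
    with open(os.path.expanduser(known_hosts), 'a') as f:
        f.write(keys)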
@@ -24,7 +24,8 @@ class StartContainerTest(BaseAPIClientTest):
        self.client.start(fake_api.FAKE_CONTAINER_ID)

        args = fake_request.call_args
        assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/start'
        assert args[0][1] == (url_prefix + 'containers/' +
                              fake_api.FAKE_CONTAINER_ID + '/start')
        assert 'data' not in args[1]
        assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS

@@ -117,7 +118,8 @@ class StartContainerTest(BaseAPIClientTest):
        self.client.start({'Id': fake_api.FAKE_CONTAINER_ID})

        args = fake_request.call_args
        assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/start'
        assert args[0][1] == (url_prefix + 'containers/' +
                              fake_api.FAKE_CONTAINER_ID + '/start')
        assert 'data' not in args[1]
        assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS

@@ -346,6 +348,22 @@ class CreateContainerTest(BaseAPIClientTest):
        assert args[1]['headers'] == {'Content-Type': 'application/json'}
        assert args[1]['params'] == {'name': 'marisa-kirisame'}

    def test_create_container_with_platform(self):
        self.client.create_container('busybox', 'true',
                                     platform='linux')

        args = fake_request.call_args
        assert args[0][1] == url_prefix + 'containers/create'
        assert json.loads(args[1]['data']) == json.loads('''
            {"Tty": false, "Image": "busybox", "Cmd": ["true"],
             "AttachStdin": false,
             "AttachStderr": true, "AttachStdout": true,
             "StdinOnce": false,
             "OpenStdin": false, "NetworkDisabled": false}
        ''')
        assert args[1]['headers'] == {'Content-Type': 'application/json'}
        assert args[1]['params'] == {'name': None, 'platform': 'linux'}

    def test_create_container_with_mem_limit_as_int(self):
        self.client.create_container(
            'busybox', 'true', host_config=self.client.create_host_config(
@@ -1051,6 +1069,25 @@ class CreateContainerTest(BaseAPIClientTest):
        ''')
        assert args[1]['headers'] == {'Content-Type': 'application/json'}

    @requires_api_version('1.41')
    def test_create_container_with_cgroupns(self):
        self.client.create_container(
            image='busybox',
            command='true',
            host_config=self.client.create_host_config(
                cgroupns='private',
            ),
        )

        args = fake_request.call_args
        assert args[0][1] == url_prefix + 'containers/create'

        expected_payload = self.base_create_payload()
        expected_payload['HostConfig'] = self.client.create_host_config()
        expected_payload['HostConfig']['CgroupnsMode'] = 'private'
        assert json.loads(args[1]['data']) == expected_payload
        assert args[1]['headers'] == {'Content-Type': 'application/json'}


class ContainerTest(BaseAPIClientTest):
    def test_list_containers(self):
@@ -1079,7 +1116,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/resize',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/resize'),
            params={'h': 15, 'w': 120},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1092,7 +1130,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/rename',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/rename'),
            params={'name': 'foobar'},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1102,7 +1141,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/wait',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/wait',
            timeout=None,
            params={}
        )
@@ -1112,7 +1151,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/wait',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/wait',
            timeout=None,
            params={}
        )
@@ -1124,7 +1163,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                    'tail': 'all'},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1140,7 +1179,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                    'tail': 'all'},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1157,7 +1196,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                    'tail': 'all'},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1172,7 +1211,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
                    'tail': 'all'},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1186,7 +1225,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
                    'tail': 'all'},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1201,7 +1240,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
                    'tail': 'all'},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1217,7 +1256,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                    'tail': 10},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1233,7 +1272,23 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                    'tail': 'all', 'since': ts},
            timeout=DEFAULT_TIMEOUT_SECONDS,
            stream=False
        )

    def test_log_since_with_float(self):
        ts = 809222400.000000
        with mock.patch('docker.api.client.APIClient.inspect_container',
                        fake_inspect_container):
            self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
                             follow=False, since=ts)

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                    'tail': 'all', 'since': ts},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1250,7 +1305,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
                    'tail': 'all', 'since': ts},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1262,7 +1317,7 @@ class ContainerTest(BaseAPIClientTest):
                         fake_inspect_container):
            with pytest.raises(docker.errors.InvalidArgument):
                self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
                                 follow=False, since=42.42)
                                 follow=False, since="42.42")

    def test_log_tty(self):
        m = mock.Mock()
@@ -1276,7 +1331,7 @@ class ContainerTest(BaseAPIClientTest):
        assert m.called
        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/logs',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs',
            params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
                    'tail': 'all'},
            timeout=DEFAULT_TIMEOUT_SECONDS,
@@ -1288,7 +1343,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/changes',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/changes'),
            timeout=DEFAULT_TIMEOUT_SECONDS
        )

@@ -1297,7 +1353,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/changes',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/changes'),
            timeout=DEFAULT_TIMEOUT_SECONDS
        )

@@ -1306,7 +1363,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/json',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/json',
            timeout=DEFAULT_TIMEOUT_SECONDS
        )

@@ -1317,7 +1374,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/stop',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/stop',
            params={'t': timeout},
            timeout=(DEFAULT_TIMEOUT_SECONDS + timeout)
        )
@@ -1330,7 +1387,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/stop',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/stop',
            params={'t': timeout},
            timeout=(DEFAULT_TIMEOUT_SECONDS + timeout)
        )
@@ -1340,7 +1397,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/pause',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/pause'),
            timeout=(DEFAULT_TIMEOUT_SECONDS)
        )

@@ -1349,7 +1407,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/unpause',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/unpause'),
            timeout=(DEFAULT_TIMEOUT_SECONDS)
        )

@@ -1358,7 +1417,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/kill',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/kill',
            params={},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1368,7 +1427,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/kill',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/kill',
            params={},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1378,7 +1437,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/kill',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/kill',
            params={'signal': signal.SIGTERM},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1388,7 +1447,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/restart',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/restart'),
            params={'t': 2},
            timeout=(DEFAULT_TIMEOUT_SECONDS + 2)
        )
@@ -1398,7 +1458,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'containers/3cc2351ab11b/restart',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/restart'),
            params={'t': 2},
            timeout=(DEFAULT_TIMEOUT_SECONDS + 2)
        )
@@ -1408,7 +1469,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'DELETE',
            url_prefix + 'containers/3cc2351ab11b',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID,
            params={'v': False, 'link': False, 'force': False},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1418,7 +1479,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'DELETE',
            url_prefix + 'containers/3cc2351ab11b',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID,
            params={'v': False, 'link': False, 'force': False},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1428,7 +1489,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/export',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/export'),
            stream=True,
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1438,7 +1500,8 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/export',
            (url_prefix + 'containers/' +
             fake_api.FAKE_CONTAINER_ID + '/export'),
            stream=True,
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1448,7 +1511,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/json',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/json',
            timeout=DEFAULT_TIMEOUT_SECONDS
        )

@@ -1464,7 +1527,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/stats',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/stats',
            timeout=60,
            stream=True
        )
@@ -1474,7 +1537,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/top',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/top',
            params={},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1484,7 +1547,7 @@ class ContainerTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'containers/3cc2351ab11b/top',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/top',
            params={'ps_args': 'waux'},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -1496,7 +1559,8 @@ class ContainerTest(BaseAPIClientTest):
            blkio_weight=345
        )
        args = fake_request.call_args
        assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/update'
        assert args[0][1] == (url_prefix + 'containers/' +
                              fake_api.FAKE_CONTAINER_ID + '/update')
        assert json.loads(args[1]['data']) == {
            'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345
        }
@@ -100,7 +100,7 @@ class ImageTest(BaseAPIClientTest):
            'repo': None,
            'comment': None,
            'tag': None,
            'container': '3cc2351ab11b',
            'container': fake_api.FAKE_CONTAINER_ID,
            'author': None,
            'changes': None
        },
@@ -112,7 +112,7 @@ class ImageTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'DELETE',
            url_prefix + 'images/e9aa60c60128',
            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID,
            params={'force': False, 'noprune': False},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -287,7 +287,7 @@ class ImageTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'images/e9aa60c60128/tag',
            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
            params={
                'tag': None,
                'repo': 'repo',
@@ -305,7 +305,7 @@ class ImageTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'images/e9aa60c60128/tag',
            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
            params={
                'tag': 'tag',
                'repo': 'repo',
@@ -320,7 +320,7 @@ class ImageTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'POST',
            url_prefix + 'images/e9aa60c60128/tag',
            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
            params={
                'tag': None,
                'repo': 'repo',
@@ -334,7 +334,7 @@ class ImageTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'GET',
            url_prefix + 'images/e9aa60c60128/get',
            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/get',
            stream=True,
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -318,7 +318,7 @@ class DockerApiTest(BaseAPIClientTest):

        fake_request.assert_called_with(
            'DELETE',
            url_prefix + 'containers/3cc2351ab11b',
            url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID,
            params={'v': False, 'link': True, 'force': False},
            timeout=DEFAULT_TIMEOUT_SECONDS
        )
@@ -378,7 +378,7 @@ class UnixSocketStreamTest(unittest.TestCase):
        self.server_socket = self._setup_socket()
        self.stop_server = False
        server_thread = threading.Thread(target=self.run_server)
        server_thread.setDaemon(True)
        server_thread.daemon = True
        server_thread.start()
        self.response = None
        self.request_handler = None
@@ -488,7 +488,7 @@ class TCPSocketStreamTest(unittest.TestCase):
        cls.server = socketserver.ThreadingTCPServer(
            ('', 0), cls.get_handler_class())
        cls.thread = threading.Thread(target=cls.server.serve_forever)
        cls.thread.setDaemon(True)
        cls.thread.daemon = True
        cls.thread.start()
        cls.address = 'http://{}:{}'.format(
            socket.gethostname(), cls.server.server_address[1])
@@ -11,6 +11,7 @@ from docker import auth, credentials, errors
from unittest import mock
import pytest


class RegressionTest(unittest.TestCase):
    def test_803_urlsafe_encode(self):
        auth_data = {
@@ -325,10 +325,26 @@ class ServiceModeTest(unittest.TestCase):
        assert mode.mode == 'global'
        assert mode.replicas is None

    def test_replicated_job_simple(self):
        mode = ServiceMode('replicated-job')
        assert mode == {'ReplicatedJob': {}}
        assert mode.mode == 'ReplicatedJob'
        assert mode.replicas is None

    def test_global_job_simple(self):
        mode = ServiceMode('global-job')
        assert mode == {'GlobalJob': {}}
        assert mode.mode == 'GlobalJob'
        assert mode.replicas is None

    def test_global_replicas_error(self):
        with pytest.raises(InvalidArgument):
            ServiceMode('global', 21)

    def test_global_job_replicas_simple(self):
        with pytest.raises(InvalidArgument):
            ServiceMode('global-job', 21)

    def test_replicated_replicas(self):
        mode = ServiceMode('replicated', 21)
        assert mode == {'replicated': {'Replicas': 21}}
@@ -4,10 +4,10 @@ from . import fake_stat

CURRENT_VERSION = f'v{constants.DEFAULT_DOCKER_API_VERSION}'

FAKE_CONTAINER_ID = '3cc2351ab11b'
FAKE_IMAGE_ID = 'e9aa60c60128'
FAKE_EXEC_ID = 'd5d177f121dc'
FAKE_NETWORK_ID = '33fb6a3462b8'
FAKE_CONTAINER_ID = '81cf499cc928ce3fedc250a080d2b9b978df20e4517304c45211e8a68b33e254'  # noqa: E501
FAKE_IMAGE_ID = 'sha256:fe7a8fc91d3f17835cbb3b86a1c60287500ab01a53bc79c4497d09f07a3f0688'  # noqa: E501
FAKE_EXEC_ID = 'b098ec855f10434b5c7c973c78484208223a83f663ddaefb0f02a242840cb1c7'  # noqa: E501
FAKE_NETWORK_ID = '1999cfb42e414483841a125ade3c276c3cb80cb3269b14e339354ac63a31b02c'  # noqa: E501
FAKE_IMAGE_NAME = 'test_image'
FAKE_TARBALL_PATH = '/path/to/tarball'
FAKE_REPO_NAME = 'repo'
@@ -546,56 +546,56 @@ fake_responses = {
        post_fake_import_image,
    f'{prefix}/{CURRENT_VERSION}/containers/json':
        get_fake_containers,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/start':
        post_fake_start_container,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/resize':
        post_fake_resize_container,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/json':
        get_fake_inspect_container,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/rename':
        post_fake_rename_container,
    f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag':
    f'{prefix}/{CURRENT_VERSION}/images/{FAKE_IMAGE_ID}/tag':
        post_fake_tag_image,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/wait':
        get_fake_wait,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/logs':
        get_fake_logs,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/changes':
        get_fake_diff,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/export':
        get_fake_export,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/update':
        post_fake_update_container,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/exec':
        post_fake_exec_create,
    f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start':
    f'{prefix}/{CURRENT_VERSION}/exec/{FAKE_EXEC_ID}/start':
        post_fake_exec_start,
    f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json':
    f'{prefix}/{CURRENT_VERSION}/exec/{FAKE_EXEC_ID}/json':
        get_fake_exec_inspect,
    f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize':
    f'{prefix}/{CURRENT_VERSION}/exec/{FAKE_EXEC_ID}/resize':
        post_fake_exec_resize,

    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/stats':
        get_fake_stats,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/top':
        get_fake_top,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/stop':
        post_fake_stop_container,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/kill':
        post_fake_kill_container,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/pause':
        post_fake_pause_container,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/unpause':
        post_fake_unpause_container,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/restart':
        post_fake_restart_container,
    f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b':
    f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}':
        delete_fake_remove_container,
    f'{prefix}/{CURRENT_VERSION}/images/create':
        post_fake_image_create,
    f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128':
    f'{prefix}/{CURRENT_VERSION}/images/{FAKE_IMAGE_ID}':
        delete_fake_remove_image,
    f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get':
    f'{prefix}/{CURRENT_VERSION}/images/{FAKE_IMAGE_ID}/get':
        get_fake_get_image,
    f'{prefix}/{CURRENT_VERSION}/images/load':
        post_fake_load_image,
@@ -39,6 +39,7 @@ class ContainerCollectionTest(unittest.TestCase):
             cap_add=['foo'],
             cap_drop=['bar'],
             cgroup_parent='foobar',
+            cgroupns='host',
             cpu_period=1,
             cpu_quota=2,
             cpu_shares=5,

@@ -77,6 +78,7 @@ class ContainerCollectionTest(unittest.TestCase):
             oom_score_adj=5,
             pid_mode='host',
             pids_limit=500,
+            platform='linux',
             ports={
                 1111: 4567,
                 2222: None

@@ -134,6 +136,7 @@ class ContainerCollectionTest(unittest.TestCase):
             'BlkioWeight': 2,
             'CapAdd': ['foo'],
             'CapDrop': ['bar'],
+            'CgroupnsMode': 'host',
             'CgroupParent': 'foobar',
             'CpuPeriod': 1,
             'CpuQuota': 2,

@@ -186,6 +189,7 @@ class ContainerCollectionTest(unittest.TestCase):
             name='somename',
             network_disabled=False,
             networking_config={'foo': None},
+            platform='linux',
             ports=[('1111', 'tcp'), ('2222', 'tcp')],
             stdin_open=True,
             stop_signal=9,
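Both new keyword arguments exercised above surface directly in the high-level API: cgroupns is serialized into the host config as 'CgroupnsMode', and platform is passed through to the create call. A usage sketch, assuming a daemon that supports cgroup namespaces and the requested platform:

    import docker

    client = docker.from_env()
    container = client.containers.run(
        'alpine',
        'echo hello',
        cgroupns='host',    # serialized as HostConfig 'CgroupnsMode'
        platform='linux',   # forwarded to the create (and pull) calls
        detach=True,
    )
    print(container.logs())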
@@ -314,6 +318,33 @@ class ContainerCollectionTest(unittest.TestCase):
                 'NetworkMode': 'default'}
             )

+    def test_run_platform(self):
+        client = make_fake_client()
+
+        # raise exception on first call, then return normal value
+        client.api.create_container.side_effect = [
+            docker.errors.ImageNotFound(""),
+            client.api.create_container.return_value
+        ]
+
+        client.containers.run(image='alpine', platform='linux/arm64')
+
+        client.api.pull.assert_called_with(
+            'alpine',
+            tag='latest',
+            all_tags=False,
+            stream=True,
+            platform='linux/arm64',
+        )
+
+        client.api.create_container.assert_called_with(
+            detach=False,
+            platform='linux/arm64',
+            image='alpine',
+            command=None,
+            host_config={'NetworkMode': 'default'},
+        )
+
     def test_create(self):
         client = make_fake_client()
         container = client.containers.create(

@@ -377,6 +408,11 @@ class ContainerCollectionTest(unittest.TestCase):


 class ContainerTest(unittest.TestCase):
+    def test_short_id(self):
+        container = Container(attrs={'Id': '8497fe9244dd45cac543eb3c37d8605077'
+                                           '6800eebef1f3ec2ee111e8ccf12db6'})
+        assert container.short_id == '8497fe9244dd'
+
     def test_name(self):
         client = make_fake_client()
         container = client.containers.get(FAKE_CONTAINER_ID)
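test_run_platform pins down a retry contract: when the create call raises ImageNotFound, run() pulls the tag with the same platform and retries. A sketch of the equivalent pattern in application code; ensure_run is a hypothetical helper name, and the call signatures mirror the assertions above:

    import docker
    from docker.errors import ImageNotFound

    def ensure_run(client, image, command, platform):
        # Try to create first; pull with the same platform only on a miss.
        try:
            return client.api.create_container(
                image=image, command=command, platform=platform)
        except ImageNotFound:
            # stream=True yields progress messages; drain them so the
            # pull completes before retrying the create.
            for _ in client.api.pull(image, tag='latest', stream=True,
                                     all_tags=False, platform=platform):
                pass
            return client.api.create_container(
                image=image, command=command, platform=platform)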
@@ -122,11 +122,11 @@ class ImageTest(unittest.TestCase):
     def test_short_id(self):
         image = Image(attrs={'Id': 'sha256:b6846070672ce4e8f1f91564ea6782bd675'
                                    'f69d65a6f73ef6262057ad0a15dcd'})
-        assert image.short_id == 'sha256:b684607067'
+        assert image.short_id == 'sha256:b6846070672c'

         image = Image(attrs={'Id': 'b6846070672ce4e8f1f91564ea6782bd675'
                              'f69d65a6f73ef6262057ad0a15dcd'})
-        assert image.short_id == 'b684607067'
+        assert image.short_id == 'b6846070672c'

     def test_tags(self):
         image = Image(attrs={

@@ -150,6 +150,16 @@ class ImageTest(unittest.TestCase):
         image.history()
         client.api.history.assert_called_with(FAKE_IMAGE_ID)

+    def test_remove(self):
+        client = make_fake_client()
+        image = client.images.get(FAKE_IMAGE_ID)
+        image.remove()
+        client.api.remove_image.assert_called_with(
+            FAKE_IMAGE_ID,
+            force=False,
+            noprune=False,
+        )
+
     def test_save(self):
         client = make_fake_client()
         image = client.images.get(FAKE_IMAGE_ID)
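The changed assertions document the new short-ID width: the truncated digest grows from 10 to 12 hex characters, and a 'sha256:' prefix is preserved. A standalone re-derivation of the behavior the assertions encode, not the library's actual implementation:

    def short_id(full_id: str) -> str:
        # Keep the algorithm prefix, truncate the digest to 12 characters.
        if full_id.startswith('sha256:'):
            return full_id[:len('sha256:') + 12]
        return full_id[:12]

    digest = ('sha256:b6846070672ce4e8f1f91564ea6782bd675'
              'f69d65a6f73ef6262057ad0a15dcd')
    assert short_id(digest) == 'sha256:b6846070672c'
    assert short_id(digest[len('sha256:'):]) == 'b6846070672c'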
@@ -11,6 +11,7 @@ class CreateServiceKwargsTest(unittest.TestCase):
             'labels': {'key': 'value'},
             'hostname': 'test_host',
             'mode': 'global',
+            'rollback_config': {'rollback': 'config'},
             'update_config': {'update': 'config'},
             'networks': ['somenet'],
             'endpoint_spec': {'blah': 'blah'},

@@ -28,7 +29,8 @@ class CreateServiceKwargsTest(unittest.TestCase):
             'constraints': ['foo=bar'],
             'preferences': ['bar=baz'],
             'platforms': [('x86_64', 'linux')],
-            'maxreplicas': 1
+            'maxreplicas': 1,
+            'sysctls': {'foo': 'bar'}
         })

         task_template = kwargs.pop('task_template')

@@ -37,6 +39,7 @@ class CreateServiceKwargsTest(unittest.TestCase):
             'name': 'somename',
             'labels': {'key': 'value'},
             'mode': 'global',
+            'rollback_config': {'rollback': 'config'},
             'update_config': {'update': 'config'},
             'endpoint_spec': {'blah': 'blah'},
         }

@@ -57,5 +60,5 @@ class CreateServiceKwargsTest(unittest.TestCase):
         assert task_template['Networks'] == [{'Target': 'somenet'}]
         assert set(task_template['ContainerSpec'].keys()) == {
             'Image', 'Command', 'Args', 'Hostname', 'Env', 'Dir', 'User',
-            'Labels', 'Mounts', 'StopGracePeriod'
+            'Labels', 'Mounts', 'StopGracePeriod', 'Sysctls'
         }
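Both additions flow straight into the services API: rollback_config sits next to update_config in the service spec, while sysctls is serialized into the task template's ContainerSpec, hence the new 'Sysctls' key asserted above. A usage sketch, assuming a swarm-mode daemon and illustrative values:

    import docker

    client = docker.from_env()
    service = client.services.create(
        'nginx:alpine',
        name='web',
        rollback_config=docker.types.RollbackConfig(parallelism=1),
        sysctls={'net.core.somaxconn': '1024'},  # -> ContainerSpec 'Sysctls'
    )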
@@ -1,15 +1,8 @@
 import unittest
+from ssl import match_hostname, CertificateError

-try:
-    from backports.ssl_match_hostname import (
-        match_hostname, CertificateError
-    )
-except ImportError:
-    from ssl import (
-        match_hostname, CertificateError
-    )
-
 import pytest
+
 from docker.transport import ssladapter

 try:
     from ssl import OP_NO_SSLv3, OP_NO_SSLv2, OP_NO_TLSv1
@@ -272,8 +272,8 @@ class ExcludePathsTest(unittest.TestCase):
         assert self.exclude(['**/target/*/*']) == convert_paths(
             self.all_paths - {
                 'target/subdir/file.txt',
                 'subdir/target/subdir/file.txt',
                 'subdir/subdir2/target/subdir/file.txt'
             }
         )

@@ -281,16 +281,16 @@ class ExcludePathsTest(unittest.TestCase):
         assert self.exclude(['subdir/**']) == convert_paths(
             self.all_paths - {
                 'subdir/file.txt',
                 'subdir/target/file.txt',
                 'subdir/target/subdir/file.txt',
                 'subdir/subdir2/file.txt',
                 'subdir/subdir2/target/file.txt',
                 'subdir/subdir2/target/subdir/file.txt',
                 'subdir/target',
                 'subdir/target/subdir',
                 'subdir/subdir2',
                 'subdir/subdir2/target',
                 'subdir/subdir2/target/subdir'
             }
         )
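The two patterns exercised in these tests have different reach: 'subdir/**' excludes everything beneath subdir/, while '**/target/*/*' only excludes grandchildren of any directory named target. A quick standalone illustration using pathlib globbing, which follows the same '**' convention for these cases; the temporary tree is illustrative:

    import pathlib
    import tempfile

    root = pathlib.Path(tempfile.mkdtemp())
    (root / 'subdir' / 'target' / 'subdir').mkdir(parents=True)
    (root / 'subdir' / 'target' / 'subdir' / 'file.txt').write_text('x')

    # Grandchildren of any target/ directory, at any depth:
    matches = [str(p.relative_to(root)) for p in root.glob('**/target/*/*')]
    print(matches)  # ['subdir/target/subdir/file.txt']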
@@ -296,17 +296,24 @@ class ParseHostTest(unittest.TestCase):
             '[fd12::82d1]:2375/docker/engine': (
                 'http://[fd12::82d1]:2375/docker/engine'
             ),
             'ssh://[fd12::82d1]': 'ssh://[fd12::82d1]:22',
             'ssh://user@[fd12::82d1]:8765': 'ssh://user@[fd12::82d1]:8765',
             'ssh://': 'ssh://127.0.0.1:22',
             'ssh://user@localhost:22': 'ssh://user@localhost:22',
             'ssh://user@remote': 'ssh://user@remote:22',
         }

         for host in invalid_hosts:
-            with pytest.raises(DockerException):
+            msg = f'Should have failed to parse invalid host: {host}'
+            with self.assertRaises(DockerException, msg=msg):
                 parse_host(host, None)

         for host, expected in valid_hosts.items():
-            assert parse_host(host, None) == expected
+            self.assertEqual(
+                parse_host(host, None),
+                expected,
+                msg=f'Failed to parse valid host: {host}',
+            )

     def test_parse_host_empty_value(self):
         unix_socket = 'http+unix:///var/run/docker.sock'
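The expected values in this table double as documentation for docker.utils.parse_host's defaulting rules: ssh hosts without an explicit port get :22, and a bare IPv6 address plus path is given the http scheme. A quick check against the real helper (the second positional argument is the is_win32 flag the tests pass as None):

    from docker.utils import parse_host

    assert parse_host('ssh://user@remote', None) == 'ssh://user@remote:22'
    assert parse_host('ssh://', None) == 'ssh://127.0.0.1:22'
    assert parse_host(
        '[fd12::82d1]:2375/docker/engine', None
    ) == 'http://[fd12::82d1]:2375/docker/engine'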