mirror of https://github.com/docker/docker-py.git

Compare commits (66 commits):

6e6a273573, 526a9db743, e5c3eb18b6, 820769e23c, db7f8b8bb6, 747d23b9d7,
fad84c371a, 5a8a42466e, 03e43be6af, 80a584651b, 8ee28517c7, d9f9b965b2,
fba6ffe297, 99ce2e6d56, 504ce6193c, bb0edd1f66, e47e966e94, a8bac88221,
e031cf0c23, b1265470e6, 6bbf741c8c, 96ef4d3bee, a3652028b1, 1ab40c8e92,
b33088e0ca, 45488acfc1, 20879eca6a, 4f2a26d21e, 7785ad913d, d8e9bcb278,
2a059a9f19, e33e0a437e, b86573a3e3, e34bcf20d9, 205d2f2bd0, b6464dbed9,
9ad4bddc9e, 336e65fc3c, 4c6437d292, 0fd79c8c0d, 3d79ce8c60, dd82f9ae8e,
e91b280074, cb21af7f69, 1818712b8c, d50cc429c2, 047df6b0d3, ae45d477c4,
f128956034, bd164f928a, 249654d4d9, 694d9792e6, eeb9ea1937, 08956b5fbc,
b8a6987cd5, f467fd9df9, 3ec5a6849a, 1784cc2962, 6ceb08273c, 097382b973,
0fad869cc6, 2a5f354b50, 7d8a161b12, 5388413dde, 3d0a3f1d77, a9b5494fd0

The combined diff for this comparison follows.
@@ -11,13 +11,26 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: '3.x'
-      - run: pip install -U ruff==0.0.284
+      - run: pip install -U ruff==0.1.8
       - name: Run ruff
        run: ruff docker tests

+  build:
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.x'
+      - run: pip3 install build && python -m build .
+      - uses: actions/upload-artifact@v4
+        with:
+          name: dist
+          path: dist
+
   unit-tests:
     runs-on: ubuntu-latest
     strategy:

@@ -27,14 +40,14 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          allow-prereleases: true
       - name: Install dependencies
        run: |
          python3 -m pip install --upgrade pip
-          pip3 install -r test-requirements.txt -r requirements.txt
+          pip3 install '.[ssh,dev]'
       - name: Run unit tests
        run: |
          docker logout

@@ -49,6 +62,9 @@ jobs:

     steps:
       - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          fetch-tags: true
       - name: make ${{ matrix.variant }}
        run: |
          docker logout
@@ -22,16 +22,18 @@ jobs:
     steps:
       - uses: actions/checkout@v4

-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: '3.x'

-      - name: Generate Pacakge
+      - name: Generate Package
        run: |
-          pip3 install setuptools wheel
-          python setup.py sdist bdist_wheel
+          pip3 install build
+          python -m build .
        env:
-          SETUPTOOLS_SCM_PRETEND_VERSION_FOR_DOCKER: ${{ inputs.tag }}
+          # This is also supported by Hatch; see
+          # https://github.com/ofek/hatch-vcs#version-source-environment-variables
+          SETUPTOOLS_SCM_PRETEND_VERSION: ${{ inputs.tag }}

      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
@@ -4,14 +4,14 @@ sphinx:
   configuration: docs/conf.py

 build:
-  os: ubuntu-20.04
+  os: ubuntu-22.04
   tools:
-    python: '3.10'
+    python: '3.12'

 python:
   install:
-    - requirements: docs-requirements.txt
     - method: pip
       path: .
       extra_requirements:
         - ssh
+        - docs
Dockerfile (16)

@@ -1,17 +1,13 @@
 # syntax=docker/dockerfile:1

 ARG PYTHON_VERSION=3.12

 FROM python:${PYTHON_VERSION}

 WORKDIR /src

-COPY requirements.txt /src/requirements.txt
-RUN pip install --no-cache-dir -r requirements.txt
-
-COPY test-requirements.txt /src/test-requirements.txt
-RUN pip install --no-cache-dir -r test-requirements.txt
-
 COPY . .
-ARG SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER
-RUN pip install --no-cache-dir .
+ARG VERSION=0.0.0.dev0
+RUN --mount=type=cache,target=/cache/pip \
+    PIP_CACHE_DIR=/cache/pip \
+    SETUPTOOLS_SCM_PRETEND_VERSION=${VERSION} \
+    pip install .[ssh]
@@ -11,7 +11,12 @@ RUN addgroup --gid $gid sphinx \
     && useradd --uid $uid --gid $gid -M sphinx

 WORKDIR /src
-COPY requirements.txt docs-requirements.txt ./
-RUN pip install --no-cache-dir -r requirements.txt -r docs-requirements.txt
+COPY . .
+
+ARG VERSION=0.0.0.dev0
+RUN --mount=type=cache,target=/cache/pip \
+    PIP_CACHE_DIR=/cache/pip \
+    SETUPTOOLS_SCM_PRETEND_VERSION=${VERSION} \
+    pip install .[ssh,docs]

 USER sphinx
@@ -1,9 +0,0 @@
-include test-requirements.txt
-include requirements.txt
-include README.md
-include README.rst
-include LICENSE
-recursive-include tests *.py
-recursive-include tests/unit/testdata *
-recursive-include tests/integration/testdata *
-recursive-include tests/gpg-keys *
Makefile (67)

@@ -1,5 +1,5 @@
-TEST_API_VERSION ?= 1.41
-TEST_ENGINE_VERSION ?= 20.10
+TEST_API_VERSION ?= 1.45
+TEST_ENGINE_VERSION ?= 26.1

 ifeq ($(OS),Windows_NT)
 	PLATFORM := Windows

@@ -11,12 +11,17 @@ ifeq ($(PLATFORM),Linux)
 	uid_args := "--build-arg uid=$(shell id -u) --build-arg gid=$(shell id -g)"
 endif

+SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER ?= $(shell git describe --match '[0-9]*' --dirty='.m' --always --tags 2>/dev/null | sed -r 's/-([0-9]+)/.dev\1/' | sed 's/-/+/')
+ifeq ($(SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER),)
+	SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER = "0.0.0.dev0"
+endif
+
 .PHONY: all
 all: test

 .PHONY: clean
 clean:
-	-docker rm -f dpy-dind-py3 dpy-dind-certs dpy-dind-ssl
+	-docker rm -f dpy-dind dpy-dind-certs dpy-dind-ssl
 	find -name "__pycache__" | xargs rm -rf

 .PHONY: build-dind-ssh

@@ -25,35 +30,46 @@ build-dind-ssh:
 		--pull \
 		-t docker-dind-ssh \
 		-f tests/Dockerfile-ssh-dind \
+		--build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
 		--build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} \
 		--build-arg API_VERSION=${TEST_API_VERSION} \
-		--build-arg APT_MIRROR .
+		.

-.PHONY: build-py3
-build-py3:
+.PHONY: build
+build:
 	docker build \
 		--pull \
 		-t docker-sdk-python3 \
 		-f tests/Dockerfile \
-		--build-arg APT_MIRROR .
+		--build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
+		.

 .PHONY: build-docs
 build-docs:
-	docker build -t docker-sdk-python-docs -f Dockerfile-docs $(uid_args) .
+	docker build \
+		-t docker-sdk-python-docs \
+		-f Dockerfile-docs \
+		--build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
+		$(uid_args) \
+		.

 .PHONY: build-dind-certs
 build-dind-certs:
-	docker build -t dpy-dind-certs -f tests/Dockerfile-dind-certs .
+	docker build \
+		-t dpy-dind-certs \
+		-f tests/Dockerfile-dind-certs \
+		--build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
+		.

 .PHONY: test
-test: ruff unit-test-py3 integration-dind integration-dind-ssl
+test: ruff unit-test integration-dind integration-dind-ssl

-.PHONY: unit-test-py3
-unit-test-py3: build-py3
+.PHONY: unit-test
+unit-test: build
 	docker run -t --rm docker-sdk-python3 py.test tests/unit

-.PHONY: integration-test-py3
-integration-test-py3: build-py3
+.PHONY: integration-test
+integration-test: build
 	docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file}

 .PHONY: setup-network

@@ -61,15 +77,12 @@ setup-network:
 	docker network inspect dpy-tests || docker network create dpy-tests

 .PHONY: integration-dind
-integration-dind: integration-dind-py3
-
-.PHONY: integration-dind-py3
-integration-dind-py3: build-py3 setup-network
-	docker rm -vf dpy-dind-py3 || :
+integration-dind: build setup-network
+	docker rm -vf dpy-dind || :

 	docker run \
 		--detach \
-		--name dpy-dind-py3 \
+		--name dpy-dind \
 		--network dpy-tests \
 		--pull=always \
 		--privileged \

@@ -82,10 +95,10 @@ integration-dind: build setup-network
 		--rm \
 		--tty \
 		busybox \
-		sh -c 'while ! nc -z dpy-dind-py3 2375; do sleep 1; done'
+		sh -c 'while ! nc -z dpy-dind 2375; do sleep 1; done'

 	docker run \
-		--env="DOCKER_HOST=tcp://dpy-dind-py3:2375" \
+		--env="DOCKER_HOST=tcp://dpy-dind:2375" \
 		--env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \
 		--network dpy-tests \
 		--rm \

@@ -93,11 +106,11 @@ integration-dind: build setup-network
 		docker-sdk-python3 \
 		py.test tests/integration/${file}

-	docker rm -vf dpy-dind-py3
+	docker rm -vf dpy-dind


 .PHONY: integration-dind-ssh
-integration-dind-ssh: build-dind-ssh build-py3 setup-network
+integration-dind-ssh: build-dind-ssh build setup-network
 	docker rm -vf dpy-dind-ssh || :
 	docker run -d --network dpy-tests --name dpy-dind-ssh --privileged \
 		docker-dind-ssh dockerd --experimental

@@ -116,7 +129,7 @@ integration-dind-ssh: build-dind-ssh build setup-network


 .PHONY: integration-dind-ssl
-integration-dind-ssl: build-dind-certs build-py3 setup-network
+integration-dind-ssl: build-dind-certs build setup-network
 	docker rm -vf dpy-dind-certs dpy-dind-ssl || :
 	docker run -d --name dpy-dind-certs dpy-dind-certs

@@ -164,7 +177,7 @@ integration-dind-ssl: build-dind-certs build setup-network
 	docker rm -vf dpy-dind-ssl dpy-dind-certs

 .PHONY: ruff
-ruff: build-py3
+ruff: build
 	docker run -t --rm docker-sdk-python3 ruff docker tests

 .PHONY: docs

@@ -172,5 +185,5 @@ docs: build-docs
 	docker run --rm -t -v `pwd`:/src docker-sdk-python-docs sphinx-build docs docs/_build

 .PHONY: shell
-shell: build-py3
+shell: build
 	docker run -it -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 python
@@ -6,7 +6,7 @@ A Python library for the Docker Engine API. It lets you do anything the `docker`

 ## Installation

-The latest stable version [is available on PyPI](https://pypi.python.org/pypi/docker/). Either add `docker` to your `requirements.txt` file or install with pip:
+The latest stable version [is available on PyPI](https://pypi.python.org/pypi/docker/). Install with pip:

     pip install docker

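As a quick, hedged smoke test of an installed client (this assumes a local Docker daemon is reachable through the default socket or `DOCKER_HOST`; it is ordinary docker-py usage, not something introduced by this diff):

```python
import docker

# Build a client from the environment (DOCKER_HOST, TLS settings, etc.).
client = docker.from_env()

# ping() returns True if the daemon answered; version() reports the negotiated API.
print(client.ping())
print(client.version()["ApiVersion"])
```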
@@ -1,7 +1,6 @@
 from .api import APIClient
 from .client import DockerClient, from_env
-from .context import Context
-from .context import ContextAPI
+from .context import Context, ContextAPI
 from .tls import TLSConfig
 from .version import __version__

@@ -3,11 +3,7 @@ import logging
 import os
 import random

-from .. import auth
-from .. import constants
-from .. import errors
-from .. import utils
+from .. import auth, constants, errors, utils


 log = logging.getLogger(__name__)


@@ -129,13 +125,16 @@ class BuildApiMixin:
             raise errors.DockerException(
                 'Can not use custom encoding if gzip is enabled'
             )
+        if tag is not None:
+            if not utils.match_tag(tag):
+                raise errors.DockerException(
+                    f"invalid tag '{tag}': invalid reference format"
+                )
         for key in container_limits.keys():
             if key not in constants.CONTAINER_LIMITS_KEYS:
                 raise errors.DockerException(
-                    f'Invalid container_limits key {key}'
+                    f"invalid tag '{tag}': invalid reference format"
                 )

         if custom_context:
             if not fileobj:
                 raise TypeError("You must specify fileobj with custom_context")

@@ -276,10 +275,24 @@ class BuildApiMixin:
         return self._stream_helper(response, decode=decode)

     @utils.minimum_version('1.31')
-    def prune_builds(self):
+    def prune_builds(self, filters=None, keep_storage=None, all=None):
         """
         Delete the builder cache

+        Args:
+            filters (dict): Filters to process on the prune list.
+                Needs Docker API v1.39+
+                Available filters:
+                - dangling (bool): When set to true (or 1), prune only
+                    unused and untagged images.
+                - until (str): Can be Unix timestamps, date formatted
+                    timestamps, or Go duration strings (e.g. 10m, 1h30m) computed
+                    relative to the daemon's local time.
+            keep_storage (int): Amount of disk space in bytes to keep for cache.
+                Needs Docker API v1.39+
+            all (bool): Remove all types of build cache.
+                Needs Docker API v1.39+
+
         Returns:
             (dict): A dictionary containing information about the operation's
                     result. The ``SpaceReclaimed`` key indicates the amount of

@@ -290,7 +303,20 @@ class BuildApiMixin:
                 If the server returns an error.
         """
         url = self._url("/build/prune")
-        return self._result(self._post(url), True)
+        if (filters, keep_storage, all) != (None, None, None) \
+                and utils.version_lt(self._version, '1.39'):
+            raise errors.InvalidVersion(
+                '`filters`, `keep_storage`, and `all` args are only available '
+                'for API version > 1.38'
+            )
+        params = {}
+        if filters is not None:
+            params['filters'] = utils.convert_filters(filters)
+        if keep_storage is not None:
+            params['keep-storage'] = keep_storage
+        if all is not None:
+            params['all'] = all
+        return self._result(self._post(url, params=params), True)

     def _set_auth_headers(self, headers):
         log.debug('Looking for auth config')
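A short usage sketch for the extended `prune_builds()` signature added above. It assumes an `APIClient` connected to a daemon that speaks API v1.39 or newer; the filter values and cache size are illustrative only:

```python
import docker

api = docker.APIClient(base_url="unix://var/run/docker.sock")

# Prune build-cache entries older than 24h, keep roughly 1 GiB of cache,
# and include all cache types (mirrors `docker builder prune` options).
result = api.prune_builds(
    filters={"until": "24h"},
    keep_storage=1 * 1024**3,
    all=True,
)
print(result.get("SpaceReclaimed", 0))
```

On daemons older than API v1.39 the same call raises `docker.errors.InvalidVersion`, as the version guard above shows.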
@@ -8,12 +8,22 @@ import requests.adapters
 import requests.exceptions

 from .. import auth
-from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH,
-                         DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS,
-                         DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM,
-                         MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES)
-from ..errors import (DockerException, InvalidVersion, TLSParameterError,
-                      create_api_error_from_http_exception)
+from ..constants import (
+    DEFAULT_MAX_POOL_SIZE,
+    DEFAULT_NUM_POOLS,
+    DEFAULT_NUM_POOLS_SSH,
+    DEFAULT_TIMEOUT_SECONDS,
+    DEFAULT_USER_AGENT,
+    IS_WINDOWS_PLATFORM,
+    MINIMUM_DOCKER_API_VERSION,
+    STREAM_HEADER_SIZE_BYTES,
+)
+from ..errors import (
+    DockerException,
+    InvalidVersion,
+    TLSParameterError,
+    create_api_error_from_http_exception,
+)
 from ..tls import TLSConfig
 from ..transport import UnixHTTPAdapter
 from ..utils import check_resource, config, update_headers, utils
@@ -1,13 +1,14 @@
 from datetime import datetime

-from .. import errors
-from .. import utils
+from .. import errors, utils
 from ..constants import DEFAULT_DATA_CHUNK_SIZE
-from ..types import CancellableStream
-from ..types import ContainerConfig
-from ..types import EndpointConfig
-from ..types import HostConfig
-from ..types import NetworkingConfig
+from ..types import (
+    CancellableStream,
+    ContainerConfig,
+    EndpointConfig,
+    HostConfig,
+    NetworkingConfig,
+)


 class ContainerApiMixin:

@@ -843,7 +844,7 @@ class ContainerApiMixin:
                 float (in fractional seconds)

         Returns:
-            (generator or str)
+            (generator of bytes or bytes)

         Raises:
             :py:class:`docker.errors.APIError`
@@ -1,5 +1,4 @@
-from .. import errors
-from .. import utils
+from .. import errors, utils
 from ..types import CancellableStream


@@ -47,7 +47,7 @@ class ImageApiMixin:
             image (str): The image to show history for

         Returns:
-            (str): The history of the image
+            (list): The history of the image

         Raises:
             :py:class:`docker.errors.APIError`
@@ -1,7 +1,6 @@
-from ..errors import InvalidVersion
-from ..utils import check_resource, minimum_version
-from ..utils import version_lt
 from .. import utils
+from ..errors import InvalidVersion
+from ..utils import check_resource, minimum_version, version_lt


 class NetworkApiMixin:
@@ -1,7 +1,6 @@
 import base64

-from .. import errors
-from .. import utils
+from .. import errors, utils


 class SecretApiMixin:
@@ -1,9 +1,8 @@
-import logging
 import http.client as http_client
+import logging

+from .. import errors, types, utils
 from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE
-from .. import errors
-from .. import types
-from .. import utils

 log = logging.getLogger(__name__)

@@ -1,5 +1,4 @@
-from .. import errors
-from .. import utils
+from .. import errors, utils


 class VolumeApiMixin:
@@ -2,8 +2,7 @@ import base64
 import json
 import logging

-from . import credentials
-from . import errors
+from . import credentials, errors
 from .utils import config

 INDEX_NAME = 'docker.io'
@@ -1,5 +1,5 @@
 from .api.client import APIClient
-from .constants import (DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE)
+from .constants import DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS
 from .models.configs import ConfigCollection
 from .models.containers import ContainerCollection
 from .models.images import ImageCollection
@@ -1,8 +1,9 @@
 import sys

 from .version import __version__

-DEFAULT_DOCKER_API_VERSION = '1.41'
-MINIMUM_DOCKER_API_VERSION = '1.21'
+DEFAULT_DOCKER_API_VERSION = '1.45'
+MINIMUM_DOCKER_API_VERSION = '1.24'
 DEFAULT_TIMEOUT_SECONDS = 60
 STREAM_HEADER_SIZE_BYTES = 8
 CONTAINER_LIMITS_KEYS = [
@@ -1,2 +1,2 @@
-from .context import Context
 from .api import ContextAPI
+from .context import Context
@@ -2,11 +2,14 @@ import json
 import os

 from docker import errors
-from docker.context.config import get_meta_dir
-from docker.context.config import METAFILE
-from docker.context.config import get_current_context_name
-from docker.context.config import write_context_name_to_docker_config
-from docker.context import Context
+
+from .config import (
+    METAFILE,
+    get_current_context_name,
+    get_meta_dir,
+    write_context_name_to_docker_config,
+)
+from .context import Context


 class ContextAPI:
@@ -1,10 +1,9 @@
-import os
-import json
 import hashlib
+import json
+import os

 from docker import utils
-from docker.constants import IS_WINDOWS_PLATFORM
-from docker.constants import DEFAULT_UNIX_SOCKET
+from docker.constants import DEFAULT_UNIX_SOCKET, IS_WINDOWS_PLATFORM
 from docker.utils.config import find_config_file

 METAFILE = "meta.json"
@@ -1,12 +1,16 @@
-import os
 import json
+import os
 from shutil import copyfile, rmtree
-from docker.tls import TLSConfig
+
 from docker.errors import ContextException
-from docker.context.config import get_meta_dir
-from docker.context.config import get_meta_file
-from docker.context.config import get_tls_dir
-from docker.context.config import get_context_host
+from docker.tls import TLSConfig
+
+from .config import (
+    get_context_host,
+    get_meta_dir,
+    get_meta_file,
+    get_tls_dir,
+)


 class Context:
@@ -1,8 +1,8 @@
-from .store import Store
-from .errors import StoreError, CredentialsNotFound
 from .constants import (
     DEFAULT_LINUX_STORE,
     DEFAULT_OSX_STORE,
     DEFAULT_WIN32_STORE,
     PROGRAM_PREFIX,
 )
+from .errors import CredentialsNotFound, StoreError
+from .store import Store
@@ -4,8 +4,7 @@ import shutil
 import subprocess
 import warnings

-from . import constants
-from . import errors
+from . import constants, errors
 from .utils import create_environment_dict


@@ -1,5 +1,5 @@
 from ..api import APIClient
-from .resource import Model, Collection
+from .resource import Collection, Model


 class Config(Model):

@@ -30,6 +30,7 @@ class ConfigCollection(Collection):

     def create(self, **kwargs):
         obj = self.client.api.create_config(**kwargs)
+        obj.setdefault("Spec", {})["Name"] = kwargs.get("name")
         return self.prepare_model(obj)
     create.__doc__ = APIClient.create_config.__doc__

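The `setdefault("Spec", {})["Name"]` line above is the fix for the `KeyError` called out in the 7.1.0 changelog: the engine only returns the new object's ID, so the model layer now fills in the name itself. A hedged usage sketch (requires a swarm-mode manager; the config name and payload are made up):

```python
import docker

client = docker.from_env()

# With the fix, the returned Config model has a usable .name even though the
# create API response contains only {"ID": ...}.
config = client.configs.create(name="app-settings", data=b"key=value\n")
print(config.name)
```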
@@ -2,16 +2,19 @@ import copy
 import ntpath
 from collections import namedtuple

-from .images import Image
-from .resource import Collection, Model
 from ..api import APIClient
 from ..constants import DEFAULT_DATA_CHUNK_SIZE
 from ..errors import (
-    ContainerError, DockerException, ImageNotFound,
-    NotFound, create_unexpected_kwargs_error
+    ContainerError,
+    DockerException,
+    ImageNotFound,
+    NotFound,
+    create_unexpected_kwargs_error,
 )
 from ..types import HostConfig, NetworkingConfig
 from ..utils import version_gte
+from .images import Image
+from .resource import Collection, Model


 class Container(Model):

@@ -178,7 +181,8 @@ class Container(Model):
             user (str): User to execute command as. Default: root
             detach (bool): If true, detach from the exec command.
                 Default: False
-            stream (bool): Stream response data. Default: False
+            stream (bool): Stream response data. Ignored if ``detach`` is true.
+                Default: False
             socket (bool): Return the connection socket to allow custom
                 read/write operations. Default: False
             environment (dict or list): A dictionary or a list of strings in

@@ -310,7 +314,7 @@ class Container(Model):
                 float (in nanoseconds)

         Returns:
-            (generator or str): Logs from the container.
+            (generator of bytes or bytes): Logs from the container.

         Raises:
             :py:class:`docker.errors.APIError`

@@ -903,9 +907,9 @@ class ContainerCollection(Collection):
                 container, exit_status, command, image, out
             )

-        return out if stream or out is None else b''.join(
-            [line for line in out]
-        )
+        if stream or out is None:
+            return out
+        return b''.join(out)

     def create(self, image, command=None, **kwargs):
         """
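The docstring changes above spell out the return types more precisely. As a quick illustration of the two shapes (assumes a reachable daemon and a pullable `alpine` image; this is standard `containers.run()` behaviour, not new API):

```python
import docker

client = docker.from_env()

# Default: the joined log output is returned as bytes.
out = client.containers.run("alpine", ["echo", "hello"])
print(out)  # b'hello\n'

# stream=True (with detach left False): a generator of bytes chunks instead.
for chunk in client.containers.run("alpine", ["echo", "hello"], stream=True):
    print(chunk)
```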
@@ -51,7 +51,7 @@ class Image(Model):
         Show the history of an image.

         Returns:
-            (str): The history of the image.
+            (list): The history of the image.

         Raises:
             :py:class:`docker.errors.APIError`

@@ -407,8 +407,8 @@ class ImageCollection(Collection):
             if match:
                 image_id = match.group(2)
                 images.append(image_id)
-            if 'error' in chunk:
-                raise ImageLoadError(chunk['error'])
+            if 'errorDetail' in chunk:
+                raise ImageLoadError(chunk['errorDetail']['message'])

         return [self.get(i) for i in images]

@@ -1,7 +1,7 @@
 from ..api import APIClient
 from ..utils import version_gte
 from .containers import Container
-from .resource import Model, Collection
+from .resource import Collection, Model


 class Network(Model):
@@ -1,4 +1,4 @@
-from .resource import Model, Collection
+from .resource import Collection, Model


 class Node(Model):
@@ -1,5 +1,5 @@
 from ..api import APIClient
-from .resource import Model, Collection
+from .resource import Collection, Model


 class Secret(Model):
@@ -1,7 +1,9 @@
 import copy
-from docker.errors import create_unexpected_kwargs_error, InvalidArgument
-from docker.types import TaskTemplate, ContainerSpec, Placement, ServiceMode
-from .resource import Model, Collection
+
+from docker.errors import InvalidArgument, create_unexpected_kwargs_error
+from docker.types import ContainerSpec, Placement, ServiceMode, TaskTemplate
+
+from .resource import Collection, Model


 class Service(Model):
@@ -1,5 +1,6 @@
 from docker.api import APIClient
 from docker.errors import APIError
+
 from .resource import Model


@@ -1,5 +1,5 @@
 from ..api import APIClient
-from .resource import Model, Collection
+from .resource import Collection, Model


 class Volume(Model):
@@ -1,4 +1,5 @@
 from .unixconn import UnixHTTPAdapter
+
 try:
     from .npipeconn import NpipeHTTPAdapter
     from .npipesocket import NpipeSocket
@@ -6,3 +6,8 @@ class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
         super().close()
         if hasattr(self, 'pools'):
             self.pools.clear()
+
+    # Fix for requests 2.32.2+:
+    # https://github.com/psf/requests/commit/c98e4d133ef29c46a9b68cd783087218a8075e05
+    def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
+        return self.get_connection(request.url, proxies)
@@ -1,13 +1,13 @@
 import queue

 import requests.adapters

-from docker.transport.basehttpadapter import BaseHTTPAdapter
-from .. import constants
-from .npipesocket import NpipeSocket
-
 import urllib3
 import urllib3.connection

+from .. import constants
+from .basehttpadapter import BaseHTTPAdapter
+from .npipesocket import NpipeSocket
+
 RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer


@@ -1,12 +1,12 @@
 import functools
-import time
 import io
+import time

+import pywintypes
+import win32api
+import win32event
 import win32file
 import win32pipe
-import pywintypes
-import win32event
-import win32api

 cERROR_PIPE_BUSY = 0xe7
 cSECURITY_SQOS_PRESENT = 0x100000
@@ -1,19 +1,19 @@
-import paramiko
-import queue
-import urllib.parse
-import requests.adapters
 import logging
 import os
+import queue
 import signal
 import socket
 import subprocess
+import urllib.parse

-from docker.transport.basehttpadapter import BaseHTTPAdapter
-from .. import constants
+import paramiko
+import requests.adapters

 import urllib3
 import urllib3.connection

+from .. import constants
+from .basehttpadapter import BaseHTTPAdapter
+
 RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer


@@ -1,12 +1,11 @@
-import requests.adapters
 import socket

-from docker.transport.basehttpadapter import BaseHTTPAdapter
-from .. import constants
+import requests.adapters

 import urllib3
 import urllib3.connection

+from .. import constants
+from .basehttpadapter import BaseHTTPAdapter
+
 RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer

@@ -1,13 +1,24 @@
-from .containers import (
-    ContainerConfig, HostConfig, LogConfig, Ulimit, DeviceRequest
-)
+from .containers import ContainerConfig, DeviceRequest, HostConfig, LogConfig, Ulimit
 from .daemon import CancellableStream
 from .healthcheck import Healthcheck
 from .networks import EndpointConfig, IPAMConfig, IPAMPool, NetworkingConfig
 from .services import (
-    ConfigReference, ContainerSpec, DNSConfig, DriverConfig, EndpointSpec,
-    Mount, Placement, PlacementPreference, Privileges, Resources,
-    RestartPolicy, RollbackConfig, SecretReference, ServiceMode, TaskTemplate,
-    UpdateConfig, NetworkAttachmentConfig
+    ConfigReference,
+    ContainerSpec,
+    DNSConfig,
+    DriverConfig,
+    EndpointSpec,
+    Mount,
+    NetworkAttachmentConfig,
+    Placement,
+    PlacementPreference,
+    Privileges,
+    Resources,
+    RestartPolicy,
+    RollbackConfig,
+    SecretReference,
+    ServiceMode,
+    TaskTemplate,
+    UpdateConfig,
 )
-from .swarm import SwarmSpec, SwarmExternalCA
+from .swarm import SwarmExternalCA, SwarmSpec
@@ -1,8 +1,16 @@
 from .. import errors
 from ..utils.utils import (
-    convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds,
-    format_environment, format_extra_hosts, normalize_links, parse_bytes,
-    parse_devices, split_command, version_gte, version_lt,
+    convert_port_bindings,
+    convert_tmpfs_mounts,
+    convert_volume_binds,
+    format_environment,
+    format_extra_hosts,
+    normalize_links,
+    parse_bytes,
+    parse_devices,
+    split_command,
+    version_gte,
+    version_lt,
 )
 from .base import DictType
 from .healthcheck import Healthcheck
@@ -1,8 +1,12 @@
 from .. import errors
 from ..constants import IS_WINDOWS_PLATFORM
 from ..utils import (
-    check_resource, format_environment, format_extra_hosts, parse_bytes,
-    split_command, convert_service_networks,
+    check_resource,
+    convert_service_networks,
+    format_environment,
+    format_extra_hosts,
+    parse_bytes,
+    split_command,
 )


@@ -238,6 +242,7 @@ class Mount(dict):
             for the ``volume`` type.
         driver_config (DriverConfig): Volume driver configuration. Only valid
             for the ``volume`` type.
+        subpath (str): Path inside a volume to mount instead of the volume root.
         tmpfs_size (int or string): The size for the tmpfs mount in bytes.
         tmpfs_mode (int): The permission mode for the tmpfs mount.
     """

@@ -245,7 +250,7 @@ class Mount(dict):
     def __init__(self, target, source, type='volume', read_only=False,
                  consistency=None, propagation=None, no_copy=False,
                  labels=None, driver_config=None, tmpfs_size=None,
-                 tmpfs_mode=None):
+                 tmpfs_mode=None, subpath=None):
         self['Target'] = target
         self['Source'] = source
         if type not in ('bind', 'volume', 'tmpfs', 'npipe'):

@@ -263,7 +268,7 @@ class Mount(dict):
             self['BindOptions'] = {
                 'Propagation': propagation
             }
-            if any([labels, driver_config, no_copy, tmpfs_size, tmpfs_mode]):
+            if any([labels, driver_config, no_copy, tmpfs_size, tmpfs_mode, subpath]):
                 raise errors.InvalidArgument(
                     'Incompatible options have been provided for the bind '
                     'type mount.'

@@ -276,6 +281,8 @@ class Mount(dict):
                 volume_opts['Labels'] = labels
             if driver_config:
                 volume_opts['DriverConfig'] = driver_config
+            if subpath:
+                volume_opts['Subpath'] = subpath
             if volume_opts:
                 self['VolumeOptions'] = volume_opts
         if any([propagation, tmpfs_size, tmpfs_mode]):
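A hedged example of the new `subpath` option added above (it is only valid for `volume` mounts and needs an engine recent enough to honour `VolumeOptions.Subpath`; the volume and path names are made up):

```python
from docker.types import Mount

# Mount only the `config/` directory of the named volume, not the volume root.
mount = Mount(
    target="/etc/app",
    source="app-data",
    type="volume",
    subpath="config",
)
print(mount["VolumeOptions"])  # {'Subpath': 'config'}
```

As the constructor change shows, passing `subpath` together with `type="bind"` raises `InvalidArgument`, the same way the other volume-only options do.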
@ -1,13 +1,28 @@
|
||||||
|
|
||||||
from .build import create_archive, exclude_paths, mkbuildcontext, tar
|
from .build import create_archive, exclude_paths, match_tag, mkbuildcontext, tar
|
||||||
from .decorators import check_resource, minimum_version, update_headers
|
from .decorators import check_resource, minimum_version, update_headers
|
||||||
from .utils import (
|
from .utils import (
|
||||||
compare_version, convert_port_bindings, convert_volume_binds,
|
compare_version,
|
||||||
parse_repository_tag, parse_host,
|
convert_filters,
|
||||||
kwargs_from_env, convert_filters, datetime_to_timestamp,
|
convert_port_bindings,
|
||||||
create_host_config, parse_bytes, parse_env_file, version_lt,
|
convert_service_networks,
|
||||||
version_gte, decode_json_header, split_command, create_ipam_config,
|
convert_volume_binds,
|
||||||
create_ipam_pool, parse_devices, normalize_links, convert_service_networks,
|
create_host_config,
|
||||||
format_environment, format_extra_hosts
|
create_ipam_config,
|
||||||
|
create_ipam_pool,
|
||||||
|
datetime_to_timestamp,
|
||||||
|
decode_json_header,
|
||||||
|
format_environment,
|
||||||
|
format_extra_hosts,
|
||||||
|
kwargs_from_env,
|
||||||
|
normalize_links,
|
||||||
|
parse_bytes,
|
||||||
|
parse_devices,
|
||||||
|
parse_env_file,
|
||||||
|
parse_host,
|
||||||
|
parse_repository_tag,
|
||||||
|
split_command,
|
||||||
|
version_gte,
|
||||||
|
version_lt,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@@ -4,11 +4,19 @@ import re
 import tarfile
 import tempfile

-from .fnmatch import fnmatch
 from ..constants import IS_WINDOWS_PLATFORM
+from .fnmatch import fnmatch

 _SEP = re.compile('/|\\\\') if IS_WINDOWS_PLATFORM else re.compile('/')
+_TAG = re.compile(
+    r"^[a-z0-9]+((\.|_|__|-+)[a-z0-9]+)*"
+    r"(?::[0-9]+)?(/[a-z0-9]+((\.|_|__|-+)[a-z0-9]+)*)*"
+    r"(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127})?$"
+)
+
+
+def match_tag(tag: str) -> bool:
+    return bool(_TAG.match(tag))


 def tar(path, exclude=None, dockerfile=None, fileobj=None, gzip=False):
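The `_TAG` pattern above is what fixes the registry-with-port case mentioned in the changelog. A quick check against the new helper, using the re-export added to `docker.utils` in this diff (the example references are illustrative):

```python
from docker.utils import match_tag

print(match_tag("alpine:3.19"))                     # True
print(match_tag("localhost:5000/myorg/app:1.0.0"))  # True: port allowed in the registry part
print(match_tag("Repo/UPPERCASE:tag"))              # False: repository names must be lowercase
```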
@@ -3,7 +3,6 @@ import json.decoder

 from ..errors import StreamParseError

-
 json_decoder = json.JSONDecoder()


@@ -64,7 +64,7 @@ def read_exactly(socket, n):
     Reads exactly n bytes from socket
     Raises SocketError if there isn't enough data
     """
-    data = bytes()
+    data = b""
     while len(data) < n:
         next_data = read(socket, n - len(data))
         if not next_data:

@@ -152,7 +152,7 @@ def consume_socket_output(frames, demux=False):
     if demux is False:
         # If the streams are multiplexed, the generator returns strings, that
         # we just need to concatenate.
-        return bytes().join(frames)
+        return b"".join(frames)

     # If the streams are demultiplexed, the generator yields tuples
     # (stdout, stderr)
@@ -5,18 +5,20 @@ import os
 import os.path
 import shlex
 import string
-from datetime import datetime
-from packaging.version import Version
+from datetime import datetime, timezone
+from functools import lru_cache
+from itertools import zip_longest
+from urllib.parse import urlparse, urlunparse

 from .. import errors
-from ..constants import DEFAULT_HTTP_HOST
-from ..constants import DEFAULT_UNIX_SOCKET
-from ..constants import DEFAULT_NPIPE
-from ..constants import BYTE_UNITS
+from ..constants import (
+    BYTE_UNITS,
+    DEFAULT_HTTP_HOST,
+    DEFAULT_NPIPE,
+    DEFAULT_UNIX_SOCKET,
+)
 from ..tls import TLSConfig

-from urllib.parse import urlparse, urlunparse
-
 URLComponents = collections.namedtuple(
     'URLComponents',
     'scheme netloc url params query fragment',

@@ -43,6 +45,7 @@ def decode_json_header(header):
     return json.loads(data)


+@lru_cache(maxsize=None)
 def compare_version(v1, v2):
     """Compare docker versions


@@ -55,14 +58,20 @@ def compare_version(v1, v2):
     >>> compare_version(v2, v2)
     0
     """
-    s1 = Version(v1)
-    s2 = Version(v2)
-    if s1 == s2:
+    if v1 == v2:
         return 0
-    elif s1 > s2:
-        return -1
-    else:
-        return 1
+    # Split into `sys.version_info` like tuples.
+    s1 = tuple(int(p) for p in v1.split('.'))
+    s2 = tuple(int(p) for p in v2.split('.'))
+    # Compare each component, padding with 0 if necessary.
+    for c1, c2 in zip_longest(s1, s2, fillvalue=0):
+        if c1 == c2:
+            continue
+        elif c1 > c2:
+            return -1
+        else:
+            return 1
+    return 0


 def version_lt(v1, v2):

@@ -152,7 +161,7 @@ def convert_volume_binds(binds):
         ]
         if 'propagation' in v and v['propagation'] in propagation_modes:
             if mode:
-                mode = ','.join([mode, v['propagation']])
+                mode = f"{mode},{v['propagation']}"
             else:
                 mode = v['propagation']


@@ -394,8 +403,8 @@ def convert_filters(filters):


 def datetime_to_timestamp(dt):
-    """Convert a UTC datetime to a Unix timestamp"""
-    delta = dt - datetime.utcfromtimestamp(0)
+    """Convert a datetime to a Unix timestamp"""
+    delta = dt.astimezone(timezone.utc) - datetime(1970, 1, 1, tzinfo=timezone.utc)
     return delta.seconds + delta.days * 24 * 3600


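The rewritten `datetime_to_timestamp()` above normalises to UTC before differencing, so timezone-aware datetimes from any offset map to the same instant. A small sanity check under that new implementation (the dates are arbitrary; imports use the re-export from `docker.utils` shown earlier in this diff):

```python
import datetime as dt

from docker.utils import datetime_to_timestamp

utc_noon = dt.datetime(2024, 5, 1, 12, 0, tzinfo=dt.timezone.utc)
# The same instant expressed at UTC+2 (14:00 local).
same_instant = utc_noon.astimezone(dt.timezone(dt.timedelta(hours=2)))

print(datetime_to_timestamp(utc_noon))      # 1714564800
print(datetime_to_timestamp(same_instant))  # 1714564800, identical timestamp
```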
@@ -1,7 +1,7 @@
 try:
     from ._version import __version__
 except ImportError:
-    from importlib.metadata import version, PackageNotFoundError
+    from importlib.metadata import PackageNotFoundError, version
     try:
         __version__ = version('docker')
     except PackageNotFoundError:
@@ -1,2 +0,0 @@
-myst-parser==0.18.0
-Sphinx==5.1.1
@@ -1,6 +1,27 @@
 Changelog
 ==========

+7.1.0
+-----
+### Upgrade Notes
+- Bumped minimum engine API version to 1.24
+- Bumped default engine API version to 1.44 (Moby 25.0)
+
+### Bugfixes
+- Fixed issue with tag parsing when the registry address includes ports that resulted in `invalid tag format` errors
+- Fixed issue preventing creating new configs (`ConfigCollection`), which failed with a `KeyError` due to the `name` field
+- Fixed an issue due to an update in the [requests](https://github.com/psf/requests) package breaking `docker-py` by applying the [suggested fix](https://github.com/psf/requests/pull/6710)
+
+### Miscellaneous
+- Documentation improvements
+- Updated Ruff (linter) and fixed minor linting issues
+- Packaging/CI updates
+  - Started using hatch for packaging (https://github.com/pypa/hatch)
+  - Updated `setup-python` github action
+- Updated tests
+  - Stopped checking for deprecated container and image related fields (`Container` and `ContainerConfig`)
+  - Updated tests that check `NetworkSettings.Networks.<network>.Aliases` due to engine changes
+
 7.0.0
 -----
 ### Upgrade Notes

@@ -20,9 +41,11 @@ Changelog
 - Add `health()` property to container that returns status (e.g. `unhealthy`)
 - Add `pause` option to `container.commit()`
 - Add support for bind mount propagation (e.g. `rshared`, `private`)
+- Add `filters`, `keep_storage`, and `all` parameters to `prune_builds()` (requires API v1.39+)

 ### Bugfixes
 - Consistently return `docker.errors.NotFound` on 404 responses
+- Validate tag format before image push

 ### Miscellaneous
 - Upgraded urllib3 version in `requirements.txt` (used for development/tests)
@@ -19,6 +19,7 @@ import datetime
 import os
 import sys
 from importlib.metadata import version

 sys.path.insert(0, os.path.abspath('..'))


@@ -1,18 +1,100 @@
 [build-system]
-requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"]
+requires = ["hatchling", "hatch-vcs"]
+build-backend = "hatchling.build"

-[tool.setuptools_scm]
-write_to = 'docker/_version.py'
+[project]
+name = "docker"
+dynamic = ["version"]
+description = "A Python library for the Docker Engine API."
+readme = "README.md"
+license = "Apache-2.0"
+requires-python = ">=3.8"
+maintainers = [
+    { name = "Docker Inc.", email = "no-reply@docker.com" },
+]
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Environment :: Other Environment",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: Apache Software License",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Topic :: Software Development",
+    "Topic :: Utilities",
+]
+
+dependencies = [
+    "requests >= 2.26.0",
+    "urllib3 >= 1.26.0",
+    "pywin32>=304; sys_platform == \"win32\"",
+]
+
+[project.optional-dependencies]
+# ssh feature allows DOCKER_HOST=ssh://... style connections
+ssh = [
+    "paramiko>=2.4.3",
+]
+# tls is always supported, the feature is a no-op for backwards compatibility
+tls = []
+# websockets can be used as an alternate container attach mechanism but
+# by default docker-py hijacks the TCP connection and does not use Websockets
+# unless attach_socket(container, ws=True) is called
+websockets = [
+    "websocket-client >= 1.3.0",
+]
+# docs are dependencies required to build the ReadTheDocs site
+# this is only needed for CI / working on the docs!
+docs = [
+    "myst-parser==0.18.0",
+    "Sphinx==5.1.1",
+]
+# dev are dependencies required to test & lint this project
+# this is only needed if you are making code changes to docker-py!
+dev = [
+    "coverage==7.2.7",
+    "pytest==7.4.2",
+    "pytest-cov==4.1.0",
+    "pytest-timeout==2.1.0",
+    "ruff==0.1.8",
+]
+
+[project.urls]
+Changelog = "https://docker-py.readthedocs.io/en/stable/change-log.html"
+Documentation = "https://docker-py.readthedocs.io"
+Homepage = "https://github.com/docker/docker-py"
+Source = "https://github.com/docker/docker-py"
+Tracker = "https://github.com/docker/docker-py/issues"
+
+[tool.hatch.version]
+source = "vcs"
+
+[tool.hatch.build.hooks.vcs]
+version-file = "docker/_version.py"
+
+[tool.hatch.build.targets.sdist]
+include = [
+    "/docker",
+]

 [tool.ruff]
-target-version = "py37"
+target-version = "py38"
 extend-select = [
     "B",
     "C",
     "F",
+    "I",
+    "UP",
     "W",
 ]
 ignore = [
+    "UP012", # unnecessary `UTF-8` argument (we want to be explicit)
     "C901", # too complex (there's a whole bunch of these)
 ]

@ -1,6 +0,0 @@
|
||||||
packaging==21.3
|
|
||||||
paramiko==2.11.0
|
|
||||||
pywin32==304; sys_platform == 'win32'
|
|
||||||
requests==2.31.0
|
|
||||||
urllib3==1.26.18
|
|
||||||
websocket-client==1.3.3
|
|
|
@ -1,3 +0,0 @@
|
||||||
[metadata]
|
|
||||||
description_file = README.rst
|
|
||||||
license = Apache License 2.0
|
|
83
setup.py
83
setup.py
|
@ -1,83 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
import codecs
|
|
||||||
import os
|
|
||||||
|
|
||||||
from setuptools import find_packages
|
|
||||||
from setuptools import setup
|
|
||||||
|
|
||||||
ROOT_DIR = os.path.dirname(__file__)
|
|
||||||
SOURCE_DIR = os.path.join(ROOT_DIR)
|
|
||||||
|
|
||||||
requirements = [
|
|
||||||
'packaging >= 14.0',
|
|
||||||
'requests >= 2.26.0',
|
|
||||||
'urllib3 >= 1.26.0',
|
|
||||||
]
|
|
||||||
|
|
||||||
extras_require = {
|
|
||||||
# win32 APIs if on Windows (required for npipe support)
|
|
||||||
':sys_platform == "win32"': 'pywin32>=304',
|
|
||||||
|
|
||||||
# This is now a no-op, as similarly the requests[security] extra is
|
|
||||||
# a no-op as of requests 2.26.0, this is always available/by default now
|
|
||||||
# see https://github.com/psf/requests/pull/5867
|
|
||||||
'tls': [],
|
|
||||||
|
|
||||||
# Only required when connecting using the ssh:// protocol
|
|
||||||
'ssh': ['paramiko>=2.4.3'],
|
|
||||||
|
|
||||||
# Only required when using websockets
|
|
||||||
'websockets': ['websocket-client >= 1.3.0'],
|
|
||||||
}
|
|
||||||
|
|
||||||
with open('./test-requirements.txt') as test_reqs_txt:
|
|
||||||
test_requirements = list(test_reqs_txt)
|
|
||||||
|
|
||||||
|
|
||||||
long_description = ''
|
|
||||||
with codecs.open('./README.md', encoding='utf-8') as readme_md:
|
|
||||||
long_description = readme_md.read()
|
|
||||||
|
|
||||||
setup(
|
|
||||||
name="docker",
|
|
||||||
use_scm_version={
|
|
||||||
'write_to': 'docker/_version.py'
|
|
||||||
},
|
|
||||||
description="A Python library for the Docker Engine API.",
|
|
||||||
long_description=long_description,
|
|
||||||
long_description_content_type='text/markdown',
|
|
||||||
url='https://github.com/docker/docker-py',
|
|
||||||
project_urls={
|
|
||||||
'Documentation': 'https://docker-py.readthedocs.io',
|
|
||||||
'Changelog': 'https://docker-py.readthedocs.io/en/stable/change-log.html',
|
|
||||||
'Source': 'https://github.com/docker/docker-py',
|
|
||||||
'Tracker': 'https://github.com/docker/docker-py/issues',
|
|
||||||
},
|
|
||||||
packages=find_packages(exclude=["tests.*", "tests"]),
|
|
||||||
setup_requires=['setuptools_scm'],
|
|
||||||
install_requires=requirements,
|
|
||||||
tests_require=test_requirements,
|
|
||||||
extras_require=extras_require,
|
|
||||||
python_requires='>=3.8',
|
|
||||||
zip_safe=False,
|
|
||||||
test_suite='tests',
|
|
||||||
classifiers=[
|
|
||||||
'Development Status :: 5 - Production/Stable',
|
|
||||||
'Environment :: Other Environment',
|
|
||||||
'Intended Audience :: Developers',
|
|
||||||
'Operating System :: OS Independent',
|
|
||||||
'Programming Language :: Python',
|
|
||||||
'Programming Language :: Python :: 3',
|
|
||||||
'Programming Language :: Python :: 3.8',
|
|
||||||
'Programming Language :: Python :: 3.9',
|
|
||||||
'Programming Language :: Python :: 3.10',
|
|
||||||
'Programming Language :: Python :: 3.11',
|
|
||||||
'Programming Language :: Python :: 3.12',
|
|
||||||
'Topic :: Software Development',
|
|
||||||
'Topic :: Utilities',
|
|
||||||
'License :: OSI Approved :: Apache Software License',
|
|
||||||
],
|
|
||||||
maintainer='Ulysses Souza',
|
|
||||||
maintainer_email='ulysses.souza@docker.com',
|
|
||||||
)
|
|
|
@ -1,6 +0,0 @@
|
||||||
setuptools==65.5.1
|
|
||||||
coverage==7.2.7
|
|
||||||
ruff==0.0.284
|
|
||||||
pytest==7.4.2
|
|
||||||
pytest-cov==4.1.0
|
|
||||||
pytest-timeout==2.1.0
|
|
|
@ -1,7 +1,6 @@
|
||||||
# syntax=docker/dockerfile:1
|
# syntax=docker/dockerfile:1
|
||||||
|
|
||||||
ARG PYTHON_VERSION=3.12
|
ARG PYTHON_VERSION=3.12
|
||||||
|
|
||||||
FROM python:${PYTHON_VERSION}
|
FROM python:${PYTHON_VERSION}
|
||||||
|
|
||||||
RUN apt-get update && apt-get -y install --no-install-recommends \
|
RUN apt-get update && apt-get -y install --no-install-recommends \
|
||||||
|
@ -27,16 +26,10 @@ RUN curl -sSL -o /opt/docker-credential-pass.tar.gz \
|
||||||
chmod +x /usr/local/bin/docker-credential-pass
|
chmod +x /usr/local/bin/docker-credential-pass
|
||||||
|
|
||||||
WORKDIR /src
|
WORKDIR /src
|
||||||
|
COPY . .
|
||||||
|
|
||||||
COPY requirements.txt /src/requirements.txt
|
ARG VERSION=0.0.0.dev0
|
||||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
RUN --mount=type=cache,target=/cache/pip \
|
||||||
pip install -r requirements.txt
|
PIP_CACHE_DIR=/cache/pip \
|
||||||
|
SETUPTOOLS_SCM_PRETEND_VERSION=${VERSION} \
|
||||||
COPY test-requirements.txt /src/test-requirements.txt
|
pip install .[dev,ssh,websockets]
|
||||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
|
||||||
pip install -r test-requirements.txt
|
|
||||||
|
|
||||||
COPY . /src
|
|
||||||
ARG SETUPTOOLS_SCM_PRETEND_VERSION=99.0.0+docker
|
|
||||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
|
||||||
pip install -e .
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
# syntax=docker/dockerfile:1
|
# syntax=docker/dockerfile:1
|
||||||
|
|
||||||
ARG API_VERSION=1.41
|
ARG API_VERSION=1.45
|
||||||
ARG ENGINE_VERSION=20.10
|
ARG ENGINE_VERSION=26.1
|
||||||
|
|
||||||
FROM docker:${ENGINE_VERSION}-dind
|
FROM docker:${ENGINE_VERSION}-dind
|
||||||
|
|
||||||
|
|
|
@ -8,10 +8,11 @@ import tarfile
|
||||||
import tempfile
|
import tempfile
|
||||||
import time
|
import time
|
||||||
|
|
||||||
import docker
|
|
||||||
import paramiko
|
import paramiko
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
|
|
||||||
def make_tree(dirs, files):
|
def make_tree(dirs, files):
|
||||||
base = tempfile.mkdtemp()
|
base = tempfile.mkdtemp()
|
||||||
|
|
|
@ -3,13 +3,13 @@ import os
|
||||||
import shutil
|
import shutil
|
||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
from docker import errors
|
from docker import errors
|
||||||
from docker.utils.proxy import ProxyConfig
|
from docker.utils.proxy import ProxyConfig
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from .base import BaseAPIIntegrationTest, TEST_IMG
|
|
||||||
from ..helpers import random_name, requires_api_version, requires_experimental
|
from ..helpers import random_name, requires_api_version, requires_experimental
|
||||||
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class BuildTest(BaseAPIIntegrationTest):
|
class BuildTest(BaseAPIIntegrationTest):
|
||||||
|
@ -275,7 +275,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
info = self.client.inspect_image('build1')
|
info = self.client.inspect_image('build1')
|
||||||
assert not info['Config']['OnBuild']
|
assert 'OnBuild' not in info['Config'] or not info['Config']['OnBuild']
|
||||||
|
|
||||||
@requires_api_version('1.25')
|
@requires_api_version('1.25')
|
||||||
def test_build_with_network_mode(self):
|
def test_build_with_network_mode(self):
|
||||||
|
@ -389,9 +389,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
lines = []
|
lines = []
|
||||||
for chunk in stream:
|
for chunk in stream:
|
||||||
lines.append(chunk.get('stream'))
|
lines.append(chunk.get('stream'))
|
||||||
expected = '{0}{2}\n{1}'.format(
|
expected = f'{control_chars[0]}{snippet}\n{control_chars[1]}'
|
||||||
control_chars[0], control_chars[1], snippet
|
|
||||||
)
|
|
||||||
assert any(line == expected for line in lines)
|
assert any(line == expected for line in lines)
|
||||||
|
|
||||||
def test_build_gzip_encoding(self):
|
def test_build_gzip_encoding(self):
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
import docker
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
from ..helpers import force_leave_swarm, requires_api_version
|
from ..helpers import force_leave_swarm, requires_api_version
|
||||||
from .base import BaseAPIIntegrationTest
|
from .base import BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
|
@ -9,15 +9,17 @@ import pytest
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
from .. import helpers
|
|
||||||
from ..helpers import assert_cat_socket_detached_with_keys
|
|
||||||
from ..helpers import ctrl_with
|
|
||||||
from ..helpers import requires_api_version, skip_if_desktop
|
|
||||||
from .base import BaseAPIIntegrationTest
|
|
||||||
from .base import TEST_IMG
|
|
||||||
from docker.constants import IS_WINDOWS_PLATFORM
|
from docker.constants import IS_WINDOWS_PLATFORM
|
||||||
from docker.utils.socket import next_frame_header
|
from docker.utils.socket import next_frame_header, read_exactly
|
||||||
from docker.utils.socket import read_exactly
|
|
||||||
|
from .. import helpers
|
||||||
|
from ..helpers import (
|
||||||
|
assert_cat_socket_detached_with_keys,
|
||||||
|
ctrl_with,
|
||||||
|
requires_api_version,
|
||||||
|
skip_if_desktop,
|
||||||
|
)
|
||||||
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class ListContainersTest(BaseAPIIntegrationTest):
|
class ListContainersTest(BaseAPIIntegrationTest):
|
||||||
|
@ -618,6 +620,56 @@ class VolumeBindTest(BaseAPIIntegrationTest):
|
||||||
assert mount['Source'] == mount_data['Name']
|
assert mount['Source'] == mount_data['Name']
|
||||||
assert mount_data['RW'] is True
|
assert mount_data['RW'] is True
|
||||||
|
|
||||||
|
@requires_api_version('1.45')
|
||||||
|
def test_create_with_subpath_volume_mount(self):
|
||||||
|
source_volume = helpers.random_name()
|
||||||
|
self.client.create_volume(name=source_volume)
|
||||||
|
|
||||||
|
setup_container = None
|
||||||
|
test_container = None
|
||||||
|
|
||||||
|
|
||||||
|
# Create a file structure in the volume to test with
|
||||||
|
setup_container = self.client.create_container(
|
||||||
|
TEST_IMG,
|
||||||
|
[
|
||||||
|
"sh",
|
||||||
|
"-c",
|
||||||
|
'mkdir -p /vol/subdir && echo "test content" > /vol/subdir/testfile.txt',
|
||||||
|
],
|
||||||
|
host_config=self.client.create_host_config(
|
||||||
|
binds=[f"{source_volume}:/vol"]
|
||||||
|
),
|
||||||
|
)
|
||||||
|
self.client.start(setup_container)
|
||||||
|
self.client.wait(setup_container)
|
||||||
|
|
||||||
|
# Now test with subpath
|
||||||
|
mount = docker.types.Mount(
|
||||||
|
type="volume",
|
||||||
|
source=source_volume,
|
||||||
|
target=self.mount_dest,
|
||||||
|
read_only=True,
|
||||||
|
subpath="subdir",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
host_config = self.client.create_host_config(mounts=[mount])
|
||||||
|
test_container = self.client.create_container(
|
||||||
|
TEST_IMG,
|
||||||
|
["cat", os.path.join(self.mount_dest, "testfile.txt")],
|
||||||
|
host_config=host_config,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.client.start(test_container)
|
||||||
|
self.client.wait(test_container) # Wait for container to finish
|
||||||
|
output = self.client.logs(test_container).decode("utf-8").strip()
|
||||||
|
|
||||||
|
# If the subpath feature is working, we should be able to see the content
|
||||||
|
# of the file in the subdir
|
||||||
|
assert output == "test content"
|
||||||
|
|
||||||
|
|
||||||
def check_container_data(self, inspect_data, rw, propagation='rprivate'):
|
def check_container_data(self, inspect_data, rw, propagation='rprivate'):
|
||||||
assert 'Mounts' in inspect_data
|
assert 'Mounts' in inspect_data
|
||||||
filtered = list(filter(
|
filtered = list(filter(
|
||||||
|
|
|
@ -1,11 +1,12 @@
|
||||||
from ..helpers import assert_cat_socket_detached_with_keys
|
|
||||||
from ..helpers import ctrl_with
|
|
||||||
from ..helpers import requires_api_version
|
|
||||||
from .base import BaseAPIIntegrationTest
|
|
||||||
from .base import TEST_IMG
|
|
||||||
from docker.utils.proxy import ProxyConfig
|
from docker.utils.proxy import ProxyConfig
|
||||||
from docker.utils.socket import next_frame_header
|
from docker.utils.socket import next_frame_header, read_exactly
|
||||||
from docker.utils.socket import read_exactly
|
|
||||||
|
from ..helpers import (
|
||||||
|
assert_cat_socket_detached_with_keys,
|
||||||
|
ctrl_with,
|
||||||
|
requires_api_version,
|
||||||
|
)
|
||||||
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class ExecTest(BaseAPIIntegrationTest):
|
class ExecTest(BaseAPIIntegrationTest):
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
from .base import BaseAPIIntegrationTest, TEST_IMG
|
|
||||||
from .. import helpers
|
from .. import helpers
|
||||||
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
|
|
||||||
SECOND = 1000000000
|
SECOND = 1000000000
|
||||||
|
|
||||||
|
|
|
@ -2,19 +2,18 @@ import contextlib
|
||||||
import json
|
import json
|
||||||
import shutil
|
import shutil
|
||||||
import socket
|
import socket
|
||||||
|
import socketserver
|
||||||
import tarfile
|
import tarfile
|
||||||
import tempfile
|
import tempfile
|
||||||
import threading
|
import threading
|
||||||
|
from http.server import SimpleHTTPRequestHandler
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from http.server import SimpleHTTPRequestHandler
|
|
||||||
import socketserver
|
|
||||||
|
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
|
|
||||||
from ..helpers import requires_api_version, requires_experimental
|
from ..helpers import requires_api_version, requires_experimental
|
||||||
from .base import BaseAPIIntegrationTest, TEST_IMG
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class ListImagesTest(BaseAPIIntegrationTest):
|
class ListImagesTest(BaseAPIIntegrationTest):
|
||||||
|
@ -85,13 +84,8 @@ class CommitTest(BaseAPIIntegrationTest):
|
||||||
img_id = res['Id']
|
img_id = res['Id']
|
||||||
self.tmp_imgs.append(img_id)
|
self.tmp_imgs.append(img_id)
|
||||||
img = self.client.inspect_image(img_id)
|
img = self.client.inspect_image(img_id)
|
||||||
assert 'Container' in img
|
|
||||||
assert img['Container'].startswith(id)
|
|
||||||
assert 'ContainerConfig' in img
|
|
||||||
assert 'Image' in img['ContainerConfig']
|
|
||||||
assert TEST_IMG == img['ContainerConfig']['Image']
|
|
||||||
busybox_id = self.client.inspect_image(TEST_IMG)['Id']
|
|
||||||
assert 'Parent' in img
|
assert 'Parent' in img
|
||||||
|
busybox_id = self.client.inspect_image(TEST_IMG)['Id']
|
||||||
assert img['Parent'] == busybox_id
|
assert img['Parent'] == busybox_id
|
||||||
|
|
||||||
def test_commit_with_changes(self):
|
def test_commit_with_changes(self):
|
||||||
|
@ -103,8 +97,6 @@ class CommitTest(BaseAPIIntegrationTest):
|
||||||
)
|
)
|
||||||
self.tmp_imgs.append(img_id)
|
self.tmp_imgs.append(img_id)
|
||||||
img = self.client.inspect_image(img_id)
|
img = self.client.inspect_image(img_id)
|
||||||
assert 'Container' in img
|
|
||||||
assert img['Container'].startswith(cid['Id'])
|
|
||||||
assert '8000/tcp' in img['Config']['ExposedPorts']
|
assert '8000/tcp' in img['Config']['ExposedPorts']
|
||||||
assert img['Config']['Cmd'] == ['bash']
|
assert img['Config']['Cmd'] == ['bash']
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
import docker
|
|
||||||
from docker.types import IPAMConfig, IPAMPool
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import docker
|
||||||
|
from docker.types import IPAMConfig, IPAMPool
|
||||||
|
|
||||||
from ..helpers import random_name, requires_api_version
|
from ..helpers import random_name, requires_api_version
|
||||||
from .base import BaseAPIIntegrationTest, TEST_IMG
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class TestNetworks(BaseAPIIntegrationTest):
|
class TestNetworks(BaseAPIIntegrationTest):
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import docker
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from .base import BaseAPIIntegrationTest
|
import docker
|
||||||
|
|
||||||
from ..helpers import requires_api_version
|
from ..helpers import requires_api_version
|
||||||
|
from .base import BaseAPIIntegrationTest
|
||||||
|
|
||||||
SSHFS = 'vieux/sshfs:latest'
|
SSHFS = 'vieux/sshfs:latest'
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
import docker
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
from ..helpers import force_leave_swarm, requires_api_version
|
from ..helpers import force_leave_swarm, requires_api_version
|
||||||
from .base import BaseAPIIntegrationTest
|
from .base import BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
|
@ -1,13 +1,12 @@
|
||||||
import random
|
import random
|
||||||
import time
|
import time
|
||||||
|
|
||||||
import docker
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from ..helpers import (
|
import docker
|
||||||
force_leave_swarm, requires_api_version, requires_experimental
|
|
||||||
)
|
from ..helpers import force_leave_swarm, requires_api_version
|
||||||
from .base import BaseAPIIntegrationTest, TEST_IMG
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class ServiceTest(BaseAPIIntegrationTest):
|
class ServiceTest(BaseAPIIntegrationTest):
|
||||||
|
@ -141,8 +140,7 @@ class ServiceTest(BaseAPIIntegrationTest):
|
||||||
assert len(services) == 1
|
assert len(services) == 1
|
||||||
assert services[0]['ID'] == svc_id['ID']
|
assert services[0]['ID'] == svc_id['ID']
|
||||||
|
|
||||||
@requires_api_version('1.25')
|
@requires_api_version('1.29')
|
||||||
@requires_experimental(until='1.29')
|
|
||||||
def test_service_logs(self):
|
def test_service_logs(self):
|
||||||
name, svc_id = self.create_simple_service()
|
name, svc_id = self.create_simple_service()
|
||||||
assert self.get_service_container(name, include_stopped=True)
|
assert self.get_service_container(name, include_stopped=True)
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
import copy
|
import copy
|
||||||
import docker
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
from ..helpers import force_leave_swarm, requires_api_version
|
from ..helpers import force_leave_swarm, requires_api_version
|
||||||
from .base import BaseAPIIntegrationTest
|
from .base import BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
import docker
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
from ..helpers import requires_api_version
|
from ..helpers import requires_api_version
|
||||||
from .base import BaseAPIIntegrationTest
|
from .base import BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
@ -16,10 +17,16 @@ class TestVolumes(BaseAPIIntegrationTest):
|
||||||
assert result['Driver'] == 'local'
|
assert result['Driver'] == 'local'
|
||||||
|
|
||||||
def test_create_volume_invalid_driver(self):
|
def test_create_volume_invalid_driver(self):
|
||||||
driver_name = 'invalid.driver'
|
# special name to avoid exponential timeout loop
|
||||||
|
# https://github.com/moby/moby/blob/9e00a63d65434cdedc444e79a2b33a7c202b10d8/pkg/plugins/client.go#L253-L254
|
||||||
|
driver_name = 'this-plugin-does-not-exist'
|
||||||
|
|
||||||
with pytest.raises(docker.errors.NotFound):
|
with pytest.raises(docker.errors.APIError) as cm:
|
||||||
self.client.create_volume('perfectcherryblossom', driver_name)
|
self.client.create_volume('perfectcherryblossom', driver_name)
|
||||||
|
assert (
|
||||||
|
cm.value.response.status_code == 404 or
|
||||||
|
cm.value.response.status_code == 400
|
||||||
|
)
|
||||||
|
|
||||||
def test_list_volumes(self):
|
def test_list_volumes(self):
|
||||||
name = 'imperishablenight'
|
name = 'imperishablenight'
|
||||||
|
|
|
@ -3,9 +3,10 @@ import shutil
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
from .. import helpers
|
|
||||||
from docker.utils import kwargs_from_env
|
from docker.utils import kwargs_from_env
|
||||||
|
|
||||||
|
from .. import helpers
|
||||||
|
|
||||||
TEST_IMG = 'alpine:3.10'
|
TEST_IMG = 'alpine:3.10'
|
||||||
TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')
|
TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')
|
||||||
|
|
||||||
|
|
|
@ -1,10 +1,9 @@
|
||||||
import threading
|
import threading
|
||||||
import unittest
|
import unittest
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
|
|
||||||
from ..helpers import requires_api_version
|
from ..helpers import requires_api_version
|
||||||
from .base import TEST_API_VERSION
|
from .base import TEST_API_VERSION
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
import sys
|
import sys
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
import docker.errors
|
import docker.errors
|
||||||
from docker.utils import kwargs_from_env
|
from docker.utils import kwargs_from_env
|
||||||
import pytest
|
|
||||||
|
|
||||||
from .base import TEST_IMG
|
from .base import TEST_IMG
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,12 @@
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from docker import errors
|
from docker import errors
|
||||||
from docker.context import ContextAPI
|
from docker.context import ContextAPI
|
||||||
from docker.tls import TLSConfig
|
from docker.tls import TLSConfig
|
||||||
|
|
||||||
from .base import BaseAPIIntegrationTest
|
from .base import BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -6,8 +6,11 @@ import sys
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from docker.credentials import (
|
from docker.credentials import (
|
||||||
CredentialsNotFound, Store, StoreError, DEFAULT_LINUX_STORE,
|
DEFAULT_LINUX_STORE,
|
||||||
DEFAULT_OSX_STORE
|
DEFAULT_OSX_STORE,
|
||||||
|
CredentialsNotFound,
|
||||||
|
Store,
|
||||||
|
StoreError,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
import os
|
import os
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
from docker.credentials.utils import create_environment_dict
|
from docker.credentials.utils import create_environment_dict
|
||||||
from unittest import mock
|
|
||||||
|
|
||||||
|
|
||||||
@mock.patch.dict(os.environ)
|
@mock.patch.dict(os.environ)
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
from docker.errors import APIError
|
|
||||||
from .base import BaseAPIIntegrationTest, TEST_IMG
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
from docker.errors import APIError
|
||||||
|
|
||||||
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class ErrorsTest(BaseAPIIntegrationTest):
|
class ErrorsTest(BaseAPIIntegrationTest):
|
||||||
def test_api_error_parses_json(self):
|
def test_api_error_parses_json(self):
|
||||||
|
|
|
@ -5,10 +5,9 @@ import threading
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
from .base import BaseIntegrationTest
|
|
||||||
from .base import TEST_API_VERSION
|
from ..helpers import random_name, requires_api_version
|
||||||
from ..helpers import random_name
|
from .base import TEST_API_VERSION, BaseIntegrationTest
|
||||||
from ..helpers import requires_api_version
|
|
||||||
|
|
||||||
|
|
||||||
class ContainerCollectionTest(BaseIntegrationTest):
|
class ContainerCollectionTest(BaseIntegrationTest):
|
||||||
|
@ -110,12 +109,12 @@ class ContainerCollectionTest(BaseIntegrationTest):
|
||||||
client.networks.create(net_name)
|
client.networks.create(net_name)
|
||||||
self.tmp_networks.append(net_name)
|
self.tmp_networks.append(net_name)
|
||||||
|
|
||||||
test_aliases = ['hello']
|
test_alias = 'hello'
|
||||||
test_driver_opt = {'key1': 'a'}
|
test_driver_opt = {'key1': 'a'}
|
||||||
|
|
||||||
networking_config = {
|
networking_config = {
|
||||||
net_name: client.api.create_endpoint_config(
|
net_name: client.api.create_endpoint_config(
|
||||||
aliases=test_aliases,
|
aliases=[test_alias],
|
||||||
driver_opt=test_driver_opt
|
driver_opt=test_driver_opt
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
@ -132,8 +131,9 @@ class ContainerCollectionTest(BaseIntegrationTest):
|
||||||
assert 'NetworkSettings' in attrs
|
assert 'NetworkSettings' in attrs
|
||||||
assert 'Networks' in attrs['NetworkSettings']
|
assert 'Networks' in attrs['NetworkSettings']
|
||||||
assert list(attrs['NetworkSettings']['Networks'].keys()) == [net_name]
|
assert list(attrs['NetworkSettings']['Networks'].keys()) == [net_name]
|
||||||
assert attrs['NetworkSettings']['Networks'][net_name]['Aliases'] == \
|
# Aliases no longer include the container's short-id in API v1.45.
|
||||||
test_aliases
|
assert attrs['NetworkSettings']['Networks'][net_name]['Aliases'] \
|
||||||
|
== [test_alias]
|
||||||
assert attrs['NetworkSettings']['Networks'][net_name]['DriverOpts'] \
|
assert attrs['NetworkSettings']['Networks'][net_name]['DriverOpts'] \
|
||||||
== test_driver_opt
|
== test_driver_opt
|
||||||
|
|
||||||
|
@ -190,7 +190,9 @@ class ContainerCollectionTest(BaseIntegrationTest):
|
||||||
assert 'NetworkSettings' in attrs
|
assert 'NetworkSettings' in attrs
|
||||||
assert 'Networks' in attrs['NetworkSettings']
|
assert 'Networks' in attrs['NetworkSettings']
|
||||||
assert list(attrs['NetworkSettings']['Networks'].keys()) == [net_name]
|
assert list(attrs['NetworkSettings']['Networks'].keys()) == [net_name]
|
||||||
assert attrs['NetworkSettings']['Networks'][net_name]['Aliases'] is None
|
# Aliases no longer include the container's short-id in API v1.45.
|
||||||
|
assert (attrs['NetworkSettings']['Networks'][net_name]['Aliases']
|
||||||
|
is None)
|
||||||
assert (attrs['NetworkSettings']['Networks'][net_name]['DriverOpts']
|
assert (attrs['NetworkSettings']['Networks'][net_name]['DriverOpts']
|
||||||
is None)
|
is None)
|
||||||
|
|
||||||
|
@ -350,14 +352,26 @@ class ContainerTest(BaseIntegrationTest):
|
||||||
assert exec_output[0] == 0
|
assert exec_output[0] == 0
|
||||||
assert exec_output[1] == b"hello\n"
|
assert exec_output[1] == b"hello\n"
|
||||||
|
|
||||||
|
def test_exec_run_error_code_from_exec(self):
|
||||||
|
client = docker.from_env(version=TEST_API_VERSION)
|
||||||
|
container = client.containers.run(
|
||||||
|
"alpine", "sh -c 'sleep 20'", detach=True
|
||||||
|
)
|
||||||
|
self.tmp_containers.append(container.id)
|
||||||
|
exec_output = container.exec_run("sh -c 'exit 42'")
|
||||||
|
assert exec_output[0] == 42
|
||||||
|
|
||||||
def test_exec_run_failed(self):
|
def test_exec_run_failed(self):
|
||||||
client = docker.from_env(version=TEST_API_VERSION)
|
client = docker.from_env(version=TEST_API_VERSION)
|
||||||
container = client.containers.run(
|
container = client.containers.run(
|
||||||
"alpine", "sh -c 'sleep 60'", detach=True
|
"alpine", "sh -c 'sleep 60'", detach=True
|
||||||
)
|
)
|
||||||
self.tmp_containers.append(container.id)
|
self.tmp_containers.append(container.id)
|
||||||
exec_output = container.exec_run("docker ps")
|
exec_output = container.exec_run("non-existent")
|
||||||
assert exec_output[0] == 126
|
# older versions of docker return `126` in the case that an exec cannot
|
||||||
|
# be started due to a missing executable. We're fixing this for the
|
||||||
|
# future, so accept both for now.
|
||||||
|
assert exec_output[0] == 127 or exec_output[0] == 126
|
||||||
|
|
||||||
def test_kill(self):
|
def test_kill(self):
|
||||||
client = docker.from_env(version=TEST_API_VERSION)
|
client = docker.from_env(version=TEST_API_VERSION)
|
||||||
|
|
|
@ -1,11 +1,12 @@
|
||||||
import io
|
import io
|
||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
import docker
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from .base import BaseIntegrationTest, TEST_IMG, TEST_API_VERSION
|
import docker
|
||||||
|
|
||||||
from ..helpers import random_name
|
from ..helpers import random_name
|
||||||
|
from .base import TEST_API_VERSION, TEST_IMG, BaseIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class ImageCollectionTest(BaseIntegrationTest):
|
class ImageCollectionTest(BaseIntegrationTest):
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
import docker
|
import docker
|
||||||
|
|
||||||
from .. import helpers
|
from .. import helpers
|
||||||
from .base import BaseIntegrationTest, TEST_API_VERSION
|
from .base import TEST_API_VERSION, BaseIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class NetworkCollectionTest(BaseIntegrationTest):
|
class NetworkCollectionTest(BaseIntegrationTest):
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
import docker
|
import docker
|
||||||
from .base import BaseIntegrationTest, TEST_API_VERSION
|
|
||||||
|
from .base import TEST_API_VERSION, BaseIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class ModelTest(BaseIntegrationTest):
|
class ModelTest(BaseIntegrationTest):
|
||||||
|
|
|
@ -1,12 +1,13 @@
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import docker
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import docker
|
||||||
|
from docker.errors import InvalidArgument
|
||||||
|
from docker.types.services import ServiceMode
|
||||||
|
|
||||||
from .. import helpers
|
from .. import helpers
|
||||||
from .base import TEST_API_VERSION
|
from .base import TEST_API_VERSION
|
||||||
from docker.errors import InvalidArgument
|
|
||||||
from docker.types.services import ServiceMode
|
|
||||||
|
|
||||||
|
|
||||||
class ServiceTest(unittest.TestCase):
|
class ServiceTest(unittest.TestCase):
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
|
|
||||||
from .. import helpers
|
from .. import helpers
|
||||||
from .base import TEST_API_VERSION
|
from .base import TEST_API_VERSION
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
class SwarmTest(unittest.TestCase):
|
class SwarmTest(unittest.TestCase):
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
import docker
|
import docker
|
||||||
from .base import BaseIntegrationTest, TEST_API_VERSION
|
|
||||||
|
from .base import TEST_API_VERSION, BaseIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class VolumesTest(BaseIntegrationTest):
|
class VolumesTest(BaseIntegrationTest):
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
import io
|
import io
|
||||||
import random
|
import random
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
|
|
||||||
from .base import BaseAPIIntegrationTest, TEST_IMG
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
class TestRegressions(BaseAPIIntegrationTest):
|
class TestRegressions(BaseAPIIntegrationTest):
|
||||||
|
|
|
@ -3,13 +3,13 @@ import os
|
||||||
import shutil
|
import shutil
|
||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
from docker import errors
|
from docker import errors
|
||||||
from docker.utils.proxy import ProxyConfig
|
from docker.utils.proxy import ProxyConfig
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from .base import BaseAPIIntegrationTest, TEST_IMG
|
|
||||||
from ..helpers import random_name, requires_api_version, requires_experimental
|
from ..helpers import random_name, requires_api_version, requires_experimental
|
||||||
|
from .base import TEST_IMG, BaseAPIIntegrationTest
|
||||||
|
|
||||||
|
|
||||||
class BuildTest(BaseAPIIntegrationTest):
|
class BuildTest(BaseAPIIntegrationTest):
|
||||||
|
@ -266,7 +266,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
info = self.client.inspect_image('build1')
|
info = self.client.inspect_image('build1')
|
||||||
assert not info['Config']['OnBuild']
|
assert 'OnBuild' not in info['Config'] or not info['Config']['OnBuild']
|
||||||
|
|
||||||
@requires_api_version('1.25')
|
@requires_api_version('1.25')
|
||||||
def test_build_with_network_mode(self):
|
def test_build_with_network_mode(self):
|
||||||
|
@ -380,9 +380,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
lines = []
|
lines = []
|
||||||
for chunk in stream:
|
for chunk in stream:
|
||||||
lines.append(chunk.get('stream'))
|
lines.append(chunk.get('stream'))
|
||||||
expected = '{0}{2}\n{1}'.format(
|
expected = f'{control_chars[0]}{snippet}\n{control_chars[1]}'
|
||||||
control_chars[0], control_chars[1], snippet
|
|
||||||
)
|
|
||||||
assert any(line == expected for line in lines)
|
assert any(line == expected for line in lines)
|
||||||
|
|
||||||
def test_build_gzip_encoding(self):
|
def test_build_gzip_encoding(self):
|
||||||
|
|
|
@ -5,9 +5,10 @@ import unittest
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
from .. import helpers
|
|
||||||
from docker.utils import kwargs_from_env
|
from docker.utils import kwargs_from_env
|
||||||
|
|
||||||
|
from .. import helpers
|
||||||
|
|
||||||
TEST_IMG = 'alpine:3.10'
|
TEST_IMG = 'alpine:3.10'
|
||||||
TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')
|
TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,11 @@
|
||||||
import os
|
import os
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
import docker
|
|
||||||
import paramiko.ssh_exception
|
import paramiko.ssh_exception
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
from .base import TEST_API_VERSION
|
from .base import TEST_API_VERSION
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -2,181 +2,206 @@ import gzip
|
||||||
import io
|
import io
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
import docker
|
|
||||||
from docker import auth
|
|
||||||
from docker.api.build import process_dockerfile
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import docker
|
||||||
|
from docker import auth, errors
|
||||||
|
from docker.api.build import process_dockerfile
|
||||||
|
|
||||||
from ..helpers import make_tree
|
from ..helpers import make_tree
|
||||||
from .api_test import BaseAPIClientTest, fake_request, url_prefix
|
from .api_test import BaseAPIClientTest, fake_request, url_prefix
|
||||||
|
|
||||||
|
|
||||||
class BuildTest(BaseAPIClientTest):
|
class BuildTest(BaseAPIClientTest):
|
||||||
def test_build_container(self):
|
def test_build_container(self):
|
||||||
script = io.BytesIO('\n'.join([
|
script = io.BytesIO(
|
||||||
'FROM busybox',
|
"\n".join(
|
||||||
'RUN mkdir -p /tmp/test',
|
[
|
||||||
'EXPOSE 8080',
|
"FROM busybox",
|
||||||
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
|
"RUN mkdir -p /tmp/test",
|
||||||
' /tmp/silence.tar.gz'
|
"EXPOSE 8080",
|
||||||
]).encode('ascii'))
|
"ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz"
|
||||||
|
" /tmp/silence.tar.gz",
|
||||||
|
]
|
||||||
|
).encode("ascii")
|
||||||
|
)
|
||||||
|
|
||||||
self.client.build(fileobj=script)
|
self.client.build(fileobj=script)
|
||||||
|
|
||||||
def test_build_container_pull(self):
|
def test_build_container_pull(self):
|
||||||
script = io.BytesIO('\n'.join([
|
script = io.BytesIO(
|
||||||
'FROM busybox',
|
"\n".join(
|
||||||
'RUN mkdir -p /tmp/test',
|
[
|
||||||
'EXPOSE 8080',
|
"FROM busybox",
|
||||||
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
|
"RUN mkdir -p /tmp/test",
|
||||||
' /tmp/silence.tar.gz'
|
"EXPOSE 8080",
|
||||||
]).encode('ascii'))
|
"ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz"
|
||||||
|
" /tmp/silence.tar.gz",
|
||||||
|
]
|
||||||
|
).encode("ascii")
|
||||||
|
)
|
||||||
|
|
||||||
self.client.build(fileobj=script, pull=True)
|
self.client.build(fileobj=script, pull=True)
|
||||||
|
|
||||||
def test_build_container_custom_context(self):
|
def test_build_container_custom_context(self):
|
||||||
script = io.BytesIO('\n'.join([
|
script = io.BytesIO(
|
||||||
'FROM busybox',
|
"\n".join(
|
||||||
'RUN mkdir -p /tmp/test',
|
[
|
||||||
'EXPOSE 8080',
|
"FROM busybox",
|
||||||
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
|
"RUN mkdir -p /tmp/test",
|
||||||
' /tmp/silence.tar.gz'
|
"EXPOSE 8080",
|
||||||
]).encode('ascii'))
|
"ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz"
|
||||||
|
" /tmp/silence.tar.gz",
|
||||||
|
]
|
||||||
|
).encode("ascii")
|
||||||
|
)
|
||||||
context = docker.utils.mkbuildcontext(script)
|
context = docker.utils.mkbuildcontext(script)
|
||||||
|
|
||||||
self.client.build(fileobj=context, custom_context=True)
|
self.client.build(fileobj=context, custom_context=True)
|
||||||
|
|
||||||
def test_build_container_custom_context_gzip(self):
|
def test_build_container_custom_context_gzip(self):
|
||||||
script = io.BytesIO('\n'.join([
|
script = io.BytesIO(
|
||||||
'FROM busybox',
|
"\n".join(
|
||||||
'RUN mkdir -p /tmp/test',
|
[
|
||||||
'EXPOSE 8080',
|
"FROM busybox",
|
||||||
'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
|
"RUN mkdir -p /tmp/test",
|
||||||
' /tmp/silence.tar.gz'
|
"EXPOSE 8080",
|
||||||
]).encode('ascii'))
|
"ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz"
|
||||||
|
" /tmp/silence.tar.gz",
|
||||||
|
]
|
||||||
|
).encode("ascii")
|
||||||
|
)
|
||||||
context = docker.utils.mkbuildcontext(script)
|
context = docker.utils.mkbuildcontext(script)
|
||||||
gz_context = gzip.GzipFile(fileobj=context)
|
gz_context = gzip.GzipFile(fileobj=context)
|
||||||
|
|
||||||
self.client.build(
|
self.client.build(fileobj=gz_context, custom_context=True, encoding="gzip")
|
||||||
fileobj=gz_context,
|
|
||||||
custom_context=True,
|
|
||||||
encoding="gzip"
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_build_remote_with_registry_auth(self):
|
def test_build_remote_with_registry_auth(self):
|
||||||
self.client._auth_configs = auth.AuthConfig({
|
self.client._auth_configs = auth.AuthConfig(
|
||||||
'auths': {
|
{
|
||||||
'https://example.com': {
|
"auths": {
|
||||||
'user': 'example',
|
"https://example.com": {
|
||||||
'password': 'example',
|
"user": "example",
|
||||||
'email': 'example@example.com'
|
"password": "example",
|
||||||
|
"email": "example@example.com",
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
|
||||||
|
|
||||||
expected_params = {'t': None, 'q': False, 'dockerfile': None,
|
|
||||||
'rm': False, 'nocache': False, 'pull': False,
|
|
||||||
'forcerm': False,
|
|
||||||
'remote': 'https://github.com/docker-library/mongo'}
|
|
||||||
expected_headers = {
|
|
||||||
'X-Registry-Config': auth.encode_header(
|
|
||||||
self.client._auth_configs.auths
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
expected_params = {
|
||||||
|
"t": None,
|
||||||
|
"q": False,
|
||||||
|
"dockerfile": None,
|
||||||
|
"rm": False,
|
||||||
|
"nocache": False,
|
||||||
|
"pull": False,
|
||||||
|
"forcerm": False,
|
||||||
|
"remote": "https://github.com/docker-library/mongo",
|
||||||
|
}
|
||||||
|
expected_headers = {
|
||||||
|
"X-Registry-Config": auth.encode_header(self.client._auth_configs.auths)
|
||||||
}
|
}
|
||||||
|
|
||||||
self.client.build(path='https://github.com/docker-library/mongo')
|
self.client.build(path="https://github.com/docker-library/mongo")
|
||||||
|
|
||||||
fake_request.assert_called_with(
|
fake_request.assert_called_with(
|
||||||
'POST',
|
"POST",
|
||||||
f"{url_prefix}build",
|
f"{url_prefix}build",
|
||||||
stream=True,
|
stream=True,
|
||||||
data=None,
|
data=None,
|
||||||
headers=expected_headers,
|
headers=expected_headers,
|
||||||
params=expected_params,
|
params=expected_params,
|
||||||
timeout=None
|
timeout=None,
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_build_container_with_named_dockerfile(self):
|
def test_build_container_with_named_dockerfile(self):
|
||||||
self.client.build('.', dockerfile='nameddockerfile')
|
self.client.build(".", dockerfile="nameddockerfile")
|
||||||
|
|
||||||
|
def test_build_with_invalid_tag(self):
|
||||||
|
with pytest.raises(errors.DockerException):
|
||||||
|
self.client.build(".", tag="https://example.com")
|
||||||
|
|
||||||
def test_build_container_with_container_limits(self):
|
def test_build_container_with_container_limits(self):
|
||||||
self.client.build('.', container_limits={
|
self.client.build(
|
||||||
'memory': 1024 * 1024,
|
".",
|
||||||
'cpusetcpus': 1,
|
container_limits={
|
||||||
'cpushares': 1000,
|
"memory": 1024 * 1024,
|
||||||
'memswap': 1024 * 1024 * 8
|
"cpusetcpus": 1,
|
||||||
})
|
"cpushares": 1000,
|
||||||
|
"memswap": 1024 * 1024 * 8,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
def test_build_container_invalid_container_limits(self):
|
def test_build_container_invalid_container_limits(self):
|
||||||
with pytest.raises(docker.errors.DockerException):
|
with pytest.raises(docker.errors.DockerException):
|
||||||
self.client.build('.', container_limits={
|
self.client.build(".", container_limits={"foo": "bar"})
|
||||||
'foo': 'bar'
|
|
||||||
})
|
|
||||||
|
|
||||||
def test_set_auth_headers_with_empty_dict_and_auth_configs(self):
|
def test_set_auth_headers_with_empty_dict_and_auth_configs(self):
|
||||||
self.client._auth_configs = auth.AuthConfig({
|
self.client._auth_configs = auth.AuthConfig(
|
||||||
'auths': {
|
{
|
||||||
'https://example.com': {
|
"auths": {
|
||||||
'user': 'example',
|
"https://example.com": {
|
||||||
'password': 'example',
|
"user": "example",
|
||||||
'email': 'example@example.com'
|
"password": "example",
|
||||||
|
"email": "example@example.com",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
|
)
|
||||||
|
|
||||||
headers = {}
|
headers = {}
|
||||||
expected_headers = {
|
expected_headers = {
|
||||||
'X-Registry-Config': auth.encode_header(
|
"X-Registry-Config": auth.encode_header(self.client._auth_configs.auths)
|
||||||
self.client._auth_configs.auths
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
self.client._set_auth_headers(headers)
|
self.client._set_auth_headers(headers)
|
||||||
assert headers == expected_headers
|
assert headers == expected_headers
|
||||||
|
|
||||||
def test_set_auth_headers_with_dict_and_auth_configs(self):
|
def test_set_auth_headers_with_dict_and_auth_configs(self):
|
||||||
self.client._auth_configs = auth.AuthConfig({
|
self.client._auth_configs = auth.AuthConfig(
|
||||||
'auths': {
|
{
|
||||||
'https://example.com': {
|
"auths": {
|
||||||
'user': 'example',
|
"https://example.com": {
|
||||||
'password': 'example',
|
"user": "example",
|
||||||
'email': 'example@example.com'
|
"password": "example",
|
||||||
|
"email": "example@example.com",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
|
)
|
||||||
|
|
||||||
headers = {'foo': 'bar'}
|
headers = {"foo": "bar"}
|
||||||
expected_headers = {
|
expected_headers = {
|
||||||
'X-Registry-Config': auth.encode_header(
|
"X-Registry-Config": auth.encode_header(self.client._auth_configs.auths),
|
||||||
self.client._auth_configs.auths
|
"foo": "bar",
|
||||||
),
|
|
||||||
'foo': 'bar'
|
|
||||||
}
|
}
|
||||||
|
|
||||||
self.client._set_auth_headers(headers)
|
self.client._set_auth_headers(headers)
|
||||||
assert headers == expected_headers
|
assert headers == expected_headers
|
||||||
|
|
||||||
def test_set_auth_headers_with_dict_and_no_auth_configs(self):
|
def test_set_auth_headers_with_dict_and_no_auth_configs(self):
|
||||||
headers = {'foo': 'bar'}
|
headers = {"foo": "bar"}
|
||||||
expected_headers = {
|
expected_headers = {"foo": "bar"}
|
||||||
'foo': 'bar'
|
|
||||||
}
|
|
||||||
|
|
||||||
self.client._set_auth_headers(headers)
|
self.client._set_auth_headers(headers)
|
||||||
assert headers == expected_headers
|
assert headers == expected_headers
|
||||||
|
|
||||||
@pytest.mark.skipif(
|
@pytest.mark.skipif(
|
||||||
not docker.constants.IS_WINDOWS_PLATFORM,
|
not docker.constants.IS_WINDOWS_PLATFORM, reason="Windows-specific syntax"
|
||||||
reason='Windows-specific syntax')
|
)
|
||||||
def test_process_dockerfile_win_longpath_prefix(self):
|
def test_process_dockerfile_win_longpath_prefix(self):
|
||||||
dirs = [
|
dirs = [
|
||||||
'foo', 'foo/bar', 'baz',
|
"foo",
|
||||||
|
"foo/bar",
|
||||||
|
"baz",
|
||||||
]
|
]
|
||||||
|
|
||||||
files = [
|
files = [
|
||||||
'Dockerfile', 'foo/Dockerfile.foo', 'foo/bar/Dockerfile.bar',
|
"Dockerfile",
|
||||||
'baz/Dockerfile.baz',
|
"foo/Dockerfile.foo",
|
||||||
|
"foo/bar/Dockerfile.bar",
|
||||||
|
"baz/Dockerfile.baz",
|
||||||
]
|
]
|
||||||
|
|
||||||
base = make_tree(dirs, files)
|
base = make_tree(dirs, files)
|
||||||
|
@ -186,40 +211,42 @@ class BuildTest(BaseAPIClientTest):
|
||||||
return docker.constants.WINDOWS_LONGPATH_PREFIX + path
|
return docker.constants.WINDOWS_LONGPATH_PREFIX + path
|
||||||
|
|
||||||
assert process_dockerfile(None, pre(base)) == (None, None)
|
assert process_dockerfile(None, pre(base)) == (None, None)
|
||||||
assert process_dockerfile('Dockerfile', pre(base)) == (
|
assert process_dockerfile("Dockerfile", pre(base)) == ("Dockerfile", None)
|
||||||
'Dockerfile', None
|
assert process_dockerfile("foo/Dockerfile.foo", pre(base)) == (
|
||||||
|
"foo/Dockerfile.foo",
|
||||||
|
None,
|
||||||
)
|
)
|
||||||
assert process_dockerfile('foo/Dockerfile.foo', pre(base)) == (
|
assert process_dockerfile("../Dockerfile", pre(f"{base}\\foo"))[1] is not None
|
||||||
'foo/Dockerfile.foo', None
|
assert process_dockerfile("../baz/Dockerfile.baz", pre(f"{base}/baz")) == (
|
||||||
|
"../baz/Dockerfile.baz",
|
||||||
|
None,
|
||||||
)
|
)
|
||||||
assert process_dockerfile(
|
|
||||||
'../Dockerfile', pre(f"{base}\\foo")
|
|
||||||
)[1] is not None
|
|
||||||
assert process_dockerfile(
|
|
||||||
'../baz/Dockerfile.baz', pre(f"{base}/baz")
|
|
||||||
) == ('../baz/Dockerfile.baz', None)
|
|
||||||
|
|
||||||
def test_process_dockerfile(self):
|
def test_process_dockerfile(self):
|
||||||
dirs = [
|
dirs = [
|
||||||
'foo', 'foo/bar', 'baz',
|
"foo",
|
||||||
|
"foo/bar",
|
||||||
|
"baz",
|
||||||
]
|
]
|
||||||
|
|
||||||
files = [
|
files = [
|
||||||
'Dockerfile', 'foo/Dockerfile.foo', 'foo/bar/Dockerfile.bar',
|
"Dockerfile",
|
||||||
'baz/Dockerfile.baz',
|
"foo/Dockerfile.foo",
|
||||||
|
"foo/bar/Dockerfile.bar",
|
||||||
|
"baz/Dockerfile.baz",
|
||||||
]
|
]
|
||||||
|
|
||||||
base = make_tree(dirs, files)
|
base = make_tree(dirs, files)
|
||||||
self.addCleanup(shutil.rmtree, base)
|
self.addCleanup(shutil.rmtree, base)
|
||||||
|
|
||||||
assert process_dockerfile(None, base) == (None, None)
|
assert process_dockerfile(None, base) == (None, None)
|
||||||
assert process_dockerfile('Dockerfile', base) == ('Dockerfile', None)
|
assert process_dockerfile("Dockerfile", base) == ("Dockerfile", None)
|
||||||
assert process_dockerfile('foo/Dockerfile.foo', base) == (
|
assert process_dockerfile("foo/Dockerfile.foo", base) == (
|
||||||
'foo/Dockerfile.foo', None
|
"foo/Dockerfile.foo",
|
||||||
|
None,
|
||||||
)
|
)
|
||||||
assert process_dockerfile(
|
assert process_dockerfile("../Dockerfile", f"{base}/foo")[1] is not None
|
||||||
'../Dockerfile', f"{base}/foo"
|
assert process_dockerfile("../baz/Dockerfile.baz", f"{base}/baz") == (
|
||||||
)[1] is not None
|
"../baz/Dockerfile.baz",
|
||||||
assert process_dockerfile('../baz/Dockerfile.baz', f"{base}/baz") == (
|
None,
|
||||||
'../baz/Dockerfile.baz', None
|
|
||||||
)
|
)
|
||||||
|
|
|
@ -1,17 +1,22 @@
|
||||||
import datetime
|
import datetime
|
||||||
import json
|
import json
|
||||||
import signal
|
import signal
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
from docker.api import APIClient
|
from docker.api import APIClient
|
||||||
from unittest import mock
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from . import fake_api
|
|
||||||
from ..helpers import requires_api_version
|
from ..helpers import requires_api_version
|
||||||
|
from . import fake_api
|
||||||
from .api_test import (
|
from .api_test import (
|
||||||
BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS,
|
DEFAULT_TIMEOUT_SECONDS,
|
||||||
fake_inspect_container, url_base
|
BaseAPIClientTest,
|
||||||
|
fake_inspect_container,
|
||||||
|
fake_request,
|
||||||
|
url_base,
|
||||||
|
url_prefix,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -2,7 +2,10 @@ import json
|
||||||
|
|
||||||
from . import fake_api
|
from . import fake_api
|
||||||
from .api_test import (
|
from .api_test import (
|
||||||
BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS,
|
DEFAULT_TIMEOUT_SECONDS,
|
||||||
|
BaseAPIClientTest,
|
||||||
|
fake_request,
|
||||||
|
url_prefix,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,12 +1,17 @@
|
||||||
import docker
|
from unittest import mock
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from . import fake_api
|
import docker
|
||||||
from docker import auth
|
from docker import auth
|
||||||
from unittest import mock
|
|
||||||
|
from . import fake_api
|
||||||
from .api_test import (
|
from .api_test import (
|
||||||
BaseAPIClientTest, fake_request, DEFAULT_TIMEOUT_SECONDS, url_prefix,
|
DEFAULT_TIMEOUT_SECONDS,
|
||||||
fake_resolve_authconfig
|
BaseAPIClientTest,
|
||||||
|
fake_request,
|
||||||
|
fake_resolve_authconfig,
|
||||||
|
url_prefix,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from .api_test import BaseAPIClientTest, url_prefix, response
|
|
||||||
from docker.types import IPAMConfig, IPAMPool
|
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
|
from docker.types import IPAMConfig, IPAMPool
|
||||||
|
|
||||||
|
from .api_test import BaseAPIClientTest, response, url_prefix
|
||||||
|
|
||||||
|
|
||||||
class NetworkTest(BaseAPIClientTest):
|
class NetworkTest(BaseAPIClientTest):
|
||||||
def test_list_networks(self):
|
def test_list_networks(self):
|
||||||
|
|
|
@ -1,29 +1,29 @@
|
||||||
import datetime
|
import datetime
|
||||||
|
import http.server
|
||||||
import io
|
import io
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
import socket
|
import socket
|
||||||
|
import socketserver
|
||||||
import struct
|
import struct
|
||||||
import tempfile
|
import tempfile
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
import unittest
|
import unittest
|
||||||
import socketserver
|
from unittest import mock
|
||||||
import http.server
|
|
||||||
|
|
||||||
import docker
|
|
||||||
import pytest
|
import pytest
|
||||||
import requests
|
import requests
|
||||||
import urllib3
|
import urllib3
|
||||||
|
|
||||||
|
import docker
|
||||||
from docker.api import APIClient
|
from docker.api import APIClient
|
||||||
from docker.constants import DEFAULT_DOCKER_API_VERSION
|
from docker.constants import DEFAULT_DOCKER_API_VERSION
|
||||||
from unittest import mock
|
|
||||||
|
|
||||||
from . import fake_api
|
from . import fake_api
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS
|
DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS
|
||||||
|
|
||||||
|
|
||||||
|
@ -82,7 +82,7 @@ def fake_delete(self, url, *args, **kwargs):
|
||||||
|
|
||||||
|
|
||||||
def fake_read_from_socket(self, response, stream, tty=False, demux=False):
|
def fake_read_from_socket(self, response, stream, tty=False, demux=False):
|
||||||
return bytes()
|
return b''
|
||||||
|
|
||||||
|
|
||||||
url_base = f'{fake_api.prefix}/'
|
url_base = f'{fake_api.prefix}/'
|
||||||
|
|
|
@ -3,7 +3,7 @@ import json
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from ..helpers import requires_api_version
|
from ..helpers import requires_api_version
|
||||||
from .api_test import BaseAPIClientTest, url_prefix, fake_request
|
from .api_test import BaseAPIClientTest, fake_request, url_prefix
|
||||||
|
|
||||||
|
|
||||||
class VolumeTest(BaseAPIClientTest):
|
class VolumeTest(BaseAPIClientTest):
|
||||||
|
|
|
@ -6,10 +6,11 @@ import random
|
||||||
import shutil
|
import shutil
|
||||||
import tempfile
|
import tempfile
|
||||||
import unittest
|
import unittest
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
from docker import auth, credentials, errors
|
from docker import auth, credentials, errors
|
||||||
from unittest import mock
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
class RegressionTest(unittest.TestCase):
|
class RegressionTest(unittest.TestCase):
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue