mirror of https://github.com/docker/docker-py.git
Compare commits
140 Commits
Commit SHA1s in this comparison (author and date columns were empty in the source):

6e6a273573 526a9db743 e5c3eb18b6 820769e23c db7f8b8bb6 747d23b9d7 fad84c371a 5a8a42466e 03e43be6af 80a584651b
8ee28517c7 d9f9b965b2 fba6ffe297 99ce2e6d56 504ce6193c bb0edd1f66 e47e966e94 a8bac88221 e031cf0c23 b1265470e6
6bbf741c8c 96ef4d3bee a3652028b1 1ab40c8e92 b33088e0ca 45488acfc1 20879eca6a 4f2a26d21e 7785ad913d d8e9bcb278
2a059a9f19 e33e0a437e b86573a3e3 e34bcf20d9 205d2f2bd0 b6464dbed9 9ad4bddc9e 336e65fc3c 4c6437d292 0fd79c8c0d
3d79ce8c60 dd82f9ae8e e91b280074 cb21af7f69 1818712b8c d50cc429c2 047df6b0d3 ae45d477c4 f128956034 bd164f928a
249654d4d9 694d9792e6 eeb9ea1937 08956b5fbc b8a6987cd5 f467fd9df9 3ec5a6849a 1784cc2962 6ceb08273c 097382b973
0fad869cc6 2a5f354b50 7d8a161b12 5388413dde 3d0a3f1d77 a9b5494fd0 cb8f2c6630 7140969239 586988ce2d fd2f5029f0
db4878118b 976c84c481 b3349c88ef b2378db7f1 911f866f72 26e07251d4 c9e3efddb8 4a88112345 b70cbd0129 7752996f78
5abae2dc8e c38656dc78 378325363e 0f0b20a6a7 bea63224e0 8b9ad7807f c68d532f54 a9a3775b15 3948540c89 0566f1260c
cc76c9c20d 09f12f2046 6aec90a41b 8447f7b0f0 601476733c ec58856ee3 fad792bfc7 9313536601 8a3402c049 ee2310595d
dbc061f4fa 4571f7f9b4 0618951093 806d36a8cd 79c4c38b42 62b4bb8489 5064995bc4 54ec0c6bf7 83e93228ea fb974de27a
f0d38fb7f4 84414e343e 78439ebbe1 0318ad8e7e 8ca9c6394f bc4c0d7cf4 14e8d07d45 c5e582c413 9cadad009e 443a35360f
e011ff5be8 7870503c52 a18f91bf08 a662d5a305 1d697680d2 576e47aaac 3178c8d48b a02ba74333 aaf68b7f98 f84623225e
7cd7458f2f e9d4ddfaec aca129dd69 ee9151f336 34e6829dd4 22718ba59a d38b41a13c 3afb4b61c3 82cf559b5a 8590eaad3c
@@ -1,6 +0,0 @@
-# GitHub code owners
-# See https://help.github.com/articles/about-codeowners/
-#
-# KEEP THIS FILE SORTED. Order is important. Last match takes precedence.
-
-* @aiordache @ulyssessouza
@@ -4,35 +4,50 @@ on: [push, pull_request]

 env:
   DOCKER_BUILDKIT: '1'
+  FORCE_COLOR: 1

 jobs:
-  flake8:
+  lint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v4
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.x'
-      - run: pip install -U flake8
-      - name: Run flake8
-        run: flake8 docker/ tests/
+      - run: pip install -U ruff==0.1.8
+      - name: Run ruff
+        run: ruff docker tests
+
+  build:
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.x'
+      - run: pip3 install build && python -m build .
+      - uses: actions/upload-artifact@v4
+        with:
+          name: dist
+          path: dist

   unit-tests:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11.0-alpha - 3.11.0"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
+          allow-prereleases: true
       - name: Install dependencies
         run: |
           python3 -m pip install --upgrade pip
-          pip3 install -r test-requirements.txt -r requirements.txt
+          pip3 install '.[ssh,dev]'
       - name: Run unit tests
         run: |
           docker logout

@@ -46,7 +61,10 @@ jobs:
       variant: [ "integration-dind", "integration-dind-ssl", "integration-dind-ssh" ]

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          fetch-tags: true
       - name: make ${{ matrix.variant }}
         run: |
           docker logout
@@ -12,22 +12,28 @@ on:
         type: boolean
         default: true

+env:
+  DOCKER_BUILDKIT: '1'
+  FORCE_COLOR: 1
+
 jobs:
   publish:
     runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
         with:
-          python-version: '3.10'
+          python-version: '3.x'

-      - name: Generate Pacakge
+      - name: Generate Package
        run: |
-          pip3 install wheel
-          python setup.py sdist bdist_wheel
+          pip3 install build
+          python -m build .
        env:
-          SETUPTOOLS_SCM_PRETEND_VERSION_FOR_DOCKER: ${{ inputs.tag }}
+          # This is also supported by Hatch; see
+          # https://github.com/ofek/hatch-vcs#version-source-environment-variables
+          SETUPTOOLS_SCM_PRETEND_VERSION: ${{ inputs.tag }}

       - name: Publish to PyPI
         uses: pypa/gh-action-pypi-publish@release/v1
@@ -4,14 +4,14 @@ sphinx:
   configuration: docs/conf.py

 build:
-  os: ubuntu-20.04
+  os: ubuntu-22.04
   tools:
-    python: '3.10'
+    python: '3.12'

 python:
   install:
-    - requirements: docs-requirements.txt
     - method: pip
       path: .
+      extra_requirements:
+        - ssh
+        - docs
@@ -44,7 +44,7 @@ paragraph in the Docker contribution guidelines.
 Before we can review your pull request, please ensure that nothing has been
 broken by your changes by running the test suite. You can do so simply by
 running `make test` in the project root. This also includes coding style using
-`flake8`
+`ruff`

 ### 3. Write clear, self-contained commits
Dockerfile (18 changes)

@@ -1,17 +1,13 @@
 # syntax=docker/dockerfile:1

-ARG PYTHON_VERSION=3.10
+ARG PYTHON_VERSION=3.12
 FROM python:${PYTHON_VERSION}

 WORKDIR /src
-
-COPY requirements.txt /src/requirements.txt
-RUN pip install --no-cache-dir -r requirements.txt
-
-COPY test-requirements.txt /src/test-requirements.txt
-RUN pip install --no-cache-dir -r test-requirements.txt
-
 COPY . .
-ARG SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER
-RUN pip install --no-cache-dir .
+
+ARG VERSION=0.0.0.dev0
+RUN --mount=type=cache,target=/cache/pip \
+    PIP_CACHE_DIR=/cache/pip \
+    SETUPTOOLS_SCM_PRETEND_VERSION=${VERSION} \
+    pip install .[ssh]
@@ -1,6 +1,6 @@
 # syntax=docker/dockerfile:1

-ARG PYTHON_VERSION=3.10
+ARG PYTHON_VERSION=3.12

 FROM python:${PYTHON_VERSION}

@@ -11,7 +11,12 @@ RUN addgroup --gid $gid sphinx \
     && useradd --uid $uid --gid $gid -M sphinx

 WORKDIR /src
-COPY requirements.txt docs-requirements.txt ./
-RUN pip install --no-cache-dir -r requirements.txt -r docs-requirements.txt
 COPY . .
+
+ARG VERSION=0.0.0.dev0
+RUN --mount=type=cache,target=/cache/pip \
+    PIP_CACHE_DIR=/cache/pip \
+    SETUPTOOLS_SCM_PRETEND_VERSION=${VERSION} \
+    pip install .[ssh,docs]

 USER sphinx
@@ -1,147 +0,0 @@
-#!groovy
-
-def imageNameBase = "dockerpinata/docker-py"
-def imageNamePy3
-def imageDindSSH
-def images = [:]
-
-def buildImage = { name, buildargs, pyTag ->
-  img = docker.image(name)
-  try {
-    img.pull()
-  } catch (Exception exc) {
-    img = docker.build(name, buildargs)
-    img.push()
-  }
-  if (pyTag?.trim()) images[pyTag] = img.id
-}
-
-def buildImages = { ->
-  wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) {
-    stage("build image") {
-      checkout(scm)
-
-      imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}"
-      imageDindSSH = "${imageNameBase}:sshdind-${gitCommit()}"
-      withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
-        buildImage(imageDindSSH, "-f tests/Dockerfile-ssh-dind .", "")
-        buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.10 .", "py3.10")
-      }
-    }
-  }
-}
-
-def getDockerVersions = { ->
-  def dockerVersions = ["19.03.12"]
-  wrappedNode(label: "amd64 && ubuntu-2004 && overlay2") {
-    def result = sh(script: """docker run --rm \\
-        --entrypoint=python \\
-        ${imageNamePy3} \\
-        /src/scripts/versions.py
-        """, returnStdout: true
-    )
-    dockerVersions = dockerVersions + result.trim().tokenize(' ')
-  }
-  return dockerVersions
-}
-
-def getAPIVersion = { engineVersion ->
-  def versionMap = [
-    '18.09': '1.39',
-    '19.03': '1.40'
-  ]
-  def result = versionMap[engineVersion.substring(0, 5)]
-  if (!result) {
-    return '1.40'
-  }
-  return result
-}
-
-def runTests = { Map settings ->
-  def dockerVersion = settings.get("dockerVersion", null)
-  def pythonVersion = settings.get("pythonVersion", null)
-  def testImage = settings.get("testImage", null)
-  def apiVersion = getAPIVersion(dockerVersion)
-
-  if (!testImage) {
-    throw new Exception("Need test image object, e.g.: `runTests(testImage: img)`")
-  }
-  if (!dockerVersion) {
-    throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '19.03.12')`")
-  }
-  if (!pythonVersion) {
-    throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py3.x')`")
-  }
-
-  { ->
-    wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) {
-      stage("test python=${pythonVersion} / docker=${dockerVersion}") {
-        checkout(scm)
-        def dindContainerName = "dpy-dind-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}"
-        def testContainerName = "dpy-tests-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}"
-        def testNetwork = "dpy-testnet-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}"
-        withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
-          try {
-            // unit tests
-            sh """docker run --rm \\
-              -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\
-              ${testImage} \\
-              py.test -v -rxs --cov=docker tests/unit
-              """
-            // integration tests
-            sh """docker network create ${testNetwork}"""
-            sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\
-              ${imageDindSSH} dockerd -H tcp://0.0.0.0:2375
-              """
-            sh """docker run --rm \\
-              --name ${testContainerName} \\
-              -e "DOCKER_HOST=tcp://${dindContainerName}:2375" \\
-              -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\
-              --network ${testNetwork} \\
-              --volumes-from ${dindContainerName} \\
-              -v $DOCKER_CONFIG/config.json:/root/.docker/config.json \\
-              ${testImage} \\
-              py.test -v -rxs --cov=docker tests/integration
-              """
-            sh """docker stop ${dindContainerName}"""
-            // start DIND container with SSH
-            sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\
-              ${imageDindSSH} dockerd --experimental"""
-            sh """docker exec ${dindContainerName} sh -c /usr/sbin/sshd """
-            // run SSH tests only
-            sh """docker run --rm \\
-              --name ${testContainerName} \\
-              -e "DOCKER_HOST=ssh://${dindContainerName}:22" \\
-              -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\
-              --network ${testNetwork} \\
-              --volumes-from ${dindContainerName} \\
-              -v $DOCKER_CONFIG/config.json:/root/.docker/config.json \\
-              ${testImage} \\
-              py.test -v -rxs --cov=docker tests/ssh
-              """
-          } finally {
-            sh """
-              docker stop ${dindContainerName}
-              docker network rm ${testNetwork}
-              """
-          }
-        }
-      }
-    }
-  }
-}
-
-
-buildImages()
-
-def dockerVersions = getDockerVersions()
-
-def testMatrix = [failFast: false]
-
-for (imgKey in new ArrayList(images.keySet())) {
-  for (version in dockerVersions) {
-    testMatrix["${imgKey}_${version}"] = runTests([testImage: images[imgKey], dockerVersion: version, pythonVersion: imgKey])
-  }
-}
-
-parallel(testMatrix)
MAINTAINERS (16 changes)

@@ -11,17 +11,19 @@
 [Org]
     [Org."Core maintainers"]
         people = [
-            "aiordache",
-            "ulyssessouza",
+            "glours",
+            "milas",
         ]
     [Org.Alumni]
         people = [
+            "aiordache",
             "aanand",
             "bfirsh",
             "dnephin",
             "mnowster",
             "mpetazzoni",
             "shin-",
+            "ulyssessouza",
         ]

 [people]

@@ -52,6 +54,16 @@
     Email = "dnephin@gmail.com"
     GitHub = "dnephin"

+    [people.glours]
+    Name = "Guillaume Lours"
+    Email = "705411+glours@users.noreply.github.com"
+    GitHub = "glours"
+
+    [people.milas]
+    Name = "Milas Bowman"
+    Email = "devnull@milas.dev"
+    GitHub = "milas"
+
     [people.mnowster]
     Name = "Mazz Mosley"
     Email = "mazz@houseofmnowster.com"
@@ -1,9 +0,0 @@
-include test-requirements.txt
-include requirements.txt
-include README.md
-include README.rst
-include LICENSE
-recursive-include tests *.py
-recursive-include tests/unit/testdata *
-recursive-include tests/integration/testdata *
-recursive-include tests/gpg-keys *
Makefile (71 changes)

@@ -1,5 +1,5 @@
-TEST_API_VERSION ?= 1.41
-TEST_ENGINE_VERSION ?= 20.10
+TEST_API_VERSION ?= 1.45
+TEST_ENGINE_VERSION ?= 26.1

 ifeq ($(OS),Windows_NT)
     PLATFORM := Windows

@@ -11,12 +11,17 @@ ifeq ($(PLATFORM),Linux)
     uid_args := "--build-arg uid=$(shell id -u) --build-arg gid=$(shell id -g)"
 endif

+SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER ?= $(shell git describe --match '[0-9]*' --dirty='.m' --always --tags 2>/dev/null | sed -r 's/-([0-9]+)/.dev\1/' | sed 's/-/+/')
+ifeq ($(SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER),)
+    SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER = "0.0.0.dev0"
+endif
+
 .PHONY: all
 all: test

 .PHONY: clean
 clean:
-    -docker rm -f dpy-dind-py3 dpy-dind-certs dpy-dind-ssl
+    -docker rm -f dpy-dind dpy-dind-certs dpy-dind-ssl
     find -name "__pycache__" | xargs rm -rf

 .PHONY: build-dind-ssh

@@ -25,35 +30,46 @@ build-dind-ssh:
     --pull \
     -t docker-dind-ssh \
     -f tests/Dockerfile-ssh-dind \
+    --build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
     --build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} \
     --build-arg API_VERSION=${TEST_API_VERSION} \
-    --build-arg APT_MIRROR .
+    .

-.PHONY: build-py3
-build-py3:
+.PHONY: build
+build:
     docker build \
         --pull \
         -t docker-sdk-python3 \
         -f tests/Dockerfile \
-        --build-arg APT_MIRROR .
+        --build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
+        .

 .PHONY: build-docs
 build-docs:
-    docker build -t docker-sdk-python-docs -f Dockerfile-docs $(uid_args) .
+    docker build \
+        -t docker-sdk-python-docs \
+        -f Dockerfile-docs \
+        --build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
+        $(uid_args) \
+        .

 .PHONY: build-dind-certs
 build-dind-certs:
-    docker build -t dpy-dind-certs -f tests/Dockerfile-dind-certs .
+    docker build \
+        -t dpy-dind-certs \
+        -f tests/Dockerfile-dind-certs \
+        --build-arg VERSION=${SETUPTOOLS_SCM_PRETEND_VERSION_DOCKER} \
+        .

 .PHONY: test
-test: flake8 unit-test-py3 integration-dind integration-dind-ssl
+test: ruff unit-test integration-dind integration-dind-ssl

-.PHONY: unit-test-py3
-unit-test-py3: build-py3
+.PHONY: unit-test
+unit-test: build
     docker run -t --rm docker-sdk-python3 py.test tests/unit

-.PHONY: integration-test-py3
-integration-test-py3: build-py3
+.PHONY: integration-test
+integration-test: build
     docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file}

 .PHONY: setup-network

@@ -61,15 +77,12 @@ setup-network:
     docker network inspect dpy-tests || docker network create dpy-tests

 .PHONY: integration-dind
-integration-dind: integration-dind-py3
-
-.PHONY: integration-dind-py3
-integration-dind-py3: build-py3 setup-network
-    docker rm -vf dpy-dind-py3 || :
+integration-dind: build setup-network
+    docker rm -vf dpy-dind || :

     docker run \
         --detach \
-        --name dpy-dind-py3 \
+        --name dpy-dind \
         --network dpy-tests \
         --pull=always \
         --privileged \

@@ -82,10 +95,10 @@ integration-dind-py3: build-py3 setup-network
         --rm \
         --tty \
         busybox \
-        sh -c 'while ! nc -z dpy-dind-py3 2375; do sleep 1; done'
+        sh -c 'while ! nc -z dpy-dind 2375; do sleep 1; done'

     docker run \
-        --env="DOCKER_HOST=tcp://dpy-dind-py3:2375" \
+        --env="DOCKER_HOST=tcp://dpy-dind:2375" \
         --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \
         --network dpy-tests \
         --rm \

@@ -93,11 +106,11 @@ integration-dind-py3: build-py3 setup-network
         docker-sdk-python3 \
         py.test tests/integration/${file}

-    docker rm -vf dpy-dind-py3
+    docker rm -vf dpy-dind


 .PHONY: integration-dind-ssh
-integration-dind-ssh: build-dind-ssh build-py3 setup-network
+integration-dind-ssh: build-dind-ssh build setup-network
     docker rm -vf dpy-dind-ssh || :
     docker run -d --network dpy-tests --name dpy-dind-ssh --privileged \
         docker-dind-ssh dockerd --experimental

@@ -116,7 +129,7 @@ integration-dind-ssh: build-dind-ssh build-py3 setup-network

 .PHONY: integration-dind-ssl
-integration-dind-ssl: build-dind-certs build-py3 setup-network
+integration-dind-ssl: build-dind-certs build setup-network
     docker rm -vf dpy-dind-certs dpy-dind-ssl || :
     docker run -d --name dpy-dind-certs dpy-dind-certs

@@ -163,14 +176,14 @@ integration-dind-ssl: build-dind-certs build-py3 setup-network

     docker rm -vf dpy-dind-ssl dpy-dind-certs

-.PHONY: flake8
-flake8: build-py3
-    docker run -t --rm docker-sdk-python3 flake8 docker tests
+.PHONY: ruff
+ruff: build
+    docker run -t --rm docker-sdk-python3 ruff docker tests

 .PHONY: docs
 docs: build-docs
     docker run --rm -t -v `pwd`:/src docker-sdk-python-docs sphinx-build docs docs/_build

 .PHONY: shell
-shell: build-py3
+shell: build
     docker run -it -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 python
@@ -1,12 +1,12 @@
 # Docker SDK for Python

-[![Build Status](https://github.com/docker/docker-py/actions/workflows/ci.yml/badge.svg)](https://github.com/docker/docker-py/actions/workflows/ci.yml/)
+[![Build Status](https://github.com/docker/docker-py/actions/workflows/ci.yml/badge.svg)](https://github.com/docker/docker-py/actions/workflows/ci.yml)

 A Python library for the Docker Engine API. It lets you do anything the `docker` command does, but from within Python apps – run containers, manage containers, manage Swarms, etc.

 ## Installation

-The latest stable version [is available on PyPI](https://pypi.python.org/pypi/docker/). Either add `docker` to your `requirements.txt` file or install with pip:
+The latest stable version [is available on PyPI](https://pypi.python.org/pypi/docker/). Install with pip:

     pip install docker
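As a quick sanity check after installing, a minimal sketch of the SDK's high-level API (the image name and command are illustrative):

```python
import docker

# Connect using the standard environment variables (DOCKER_HOST, etc.).
client = docker.from_env()

# Run a throwaway container and capture its output.
output = client.containers.run("alpine", ["echo", "hello from docker-py"], remove=True)
print(output.decode())
```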
@@ -1,8 +1,6 @@
-# flake8: noqa
 from .api import APIClient
 from .client import DockerClient, from_env
-from .context import Context
-from .context import ContextAPI
+from .context import Context, ContextAPI
 from .tls import TLSConfig
 from .version import __version__
@@ -1,2 +1 @@
-# flake8: noqa
 from .client import APIClient
@@ -3,11 +3,7 @@ import logging
 import os
 import random

-from .. import auth
-from .. import constants
-from .. import errors
-from .. import utils
+from .. import auth, constants, errors, utils

 log = logging.getLogger(__name__)

@@ -129,13 +125,16 @@ class BuildApiMixin:
             raise errors.DockerException(
                 'Can not use custom encoding if gzip is enabled'
             )

+        if tag is not None:
+            if not utils.match_tag(tag):
+                raise errors.DockerException(
+                    f"invalid tag '{tag}': invalid reference format"
+                )
         for key in container_limits.keys():
             if key not in constants.CONTAINER_LIMITS_KEYS:
                 raise errors.DockerException(
-                    f'Invalid container_limits key {key}'
+                    f"invalid tag '{tag}': invalid reference format"
                 )

         if custom_context:
             if not fileobj:
                 raise TypeError("You must specify fileobj with custom_context")

@@ -276,10 +275,24 @@ class BuildApiMixin:
         return self._stream_helper(response, decode=decode)

     @utils.minimum_version('1.31')
-    def prune_builds(self):
+    def prune_builds(self, filters=None, keep_storage=None, all=None):
         """
         Delete the builder cache

+        Args:
+            filters (dict): Filters to process on the prune list.
+                Needs Docker API v1.39+
+                Available filters:
+                - dangling (bool): When set to true (or 1), prune only
+                  unused and untagged images.
+                - until (str): Can be Unix timestamps, date formatted
+                  timestamps, or Go duration strings (e.g. 10m, 1h30m) computed
+                  relative to the daemon's local time.
+            keep_storage (int): Amount of disk space in bytes to keep for cache.
+                Needs Docker API v1.39+
+            all (bool): Remove all types of build cache.
+                Needs Docker API v1.39+
+
         Returns:
             (dict): A dictionary containing information about the operation's
                     result. The ``SpaceReclaimed`` key indicates the amount of

@@ -290,7 +303,20 @@ class BuildApiMixin:
                 If the server returns an error.
         """
         url = self._url("/build/prune")
-        return self._result(self._post(url), True)
+        if (filters, keep_storage, all) != (None, None, None) \
+                and utils.version_lt(self._version, '1.39'):
+            raise errors.InvalidVersion(
+                '`filters`, `keep_storage`, and `all` args are only available '
+                'for API version > 1.38'
+            )
+        params = {}
+        if filters is not None:
+            params['filters'] = utils.convert_filters(filters)
+        if keep_storage is not None:
+            params['keep-storage'] = keep_storage
+        if all is not None:
+            params['all'] = all
+        return self._result(self._post(url, params=params), True)

     def _set_auth_headers(self, headers):
         log.debug('Looking for auth config')

@@ -314,9 +340,8 @@ class BuildApiMixin:
             auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})

         log.debug(
-            'Sending auth config ({})'.format(
-                ', '.join(repr(k) for k in auth_data.keys())
-            )
+            "Sending auth config (%s)",
+            ', '.join(repr(k) for k in auth_data),
         )

         if auth_data:

@@ -336,12 +361,9 @@ def process_dockerfile(dockerfile, path):
     abs_dockerfile = os.path.join(path, dockerfile)
     if constants.IS_WINDOWS_PLATFORM and path.startswith(
             constants.WINDOWS_LONGPATH_PREFIX):
-        abs_dockerfile = '{}{}'.format(
-            constants.WINDOWS_LONGPATH_PREFIX,
-            os.path.normpath(
-                abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):]
-            )
-        )
+        normpath = os.path.normpath(
+            abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):])
+        abs_dockerfile = f'{constants.WINDOWS_LONGPATH_PREFIX}{normpath}'
     if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
             os.path.relpath(abs_dockerfile, path).startswith('..')):
         # Dockerfile not in context - read data to insert into tar later
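A usage sketch of the pruning parameters added above (the filter value is illustrative; per the docstring, these arguments need Docker API v1.39+):

```python
import docker

api = docker.APIClient(base_url="unix://var/run/docker.sock")

# Remove build cache entries older than 24 hours, including internal caches.
result = api.prune_builds(filters={"until": "24h"}, all=True)
print(result.get("SpaceReclaimed"))
```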
@@ -4,18 +4,28 @@ import urllib
 from functools import partial

 import requests
+import requests.adapters
 import requests.exceptions
-import websocket

 from .. import auth
-from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH,
-                         DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS,
-                         DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM,
-                         MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES)
-from ..errors import (DockerException, InvalidVersion, TLSParameterError,
-                      create_api_error_from_http_exception)
+from ..constants import (
+    DEFAULT_MAX_POOL_SIZE,
+    DEFAULT_NUM_POOLS,
+    DEFAULT_NUM_POOLS_SSH,
+    DEFAULT_TIMEOUT_SECONDS,
+    DEFAULT_USER_AGENT,
+    IS_WINDOWS_PLATFORM,
+    MINIMUM_DOCKER_API_VERSION,
+    STREAM_HEADER_SIZE_BYTES,
+)
+from ..errors import (
+    DockerException,
+    InvalidVersion,
+    TLSParameterError,
+    create_api_error_from_http_exception,
+)
 from ..tls import TLSConfig
-from ..transport import SSLHTTPAdapter, UnixHTTPAdapter
+from ..transport import UnixHTTPAdapter
 from ..utils import check_resource, config, update_headers, utils
 from ..utils.json_stream import json_stream
 from ..utils.proxy import ProxyConfig

@@ -160,10 +170,10 @@ class APIClient(
                     base_url, timeout, pool_connections=num_pools,
                     max_pool_size=max_pool_size
                 )
-            except NameError:
+            except NameError as err:
                 raise DockerException(
                     'Install pypiwin32 package to enable npipe:// support'
-                )
+                ) from err
             self.mount('http+docker://', self._custom_adapter)
             self.base_url = 'http+docker://localnpipe'
         elif base_url.startswith('ssh://'):

@@ -172,10 +182,10 @@ class APIClient(
                     base_url, timeout, pool_connections=num_pools,
                     max_pool_size=max_pool_size, shell_out=use_ssh_client
                 )
-            except NameError:
+            except NameError as err:
                 raise DockerException(
                     'Install paramiko package to enable ssh:// support'
-                )
+                ) from err
             self.mount('http+docker://ssh', self._custom_adapter)
             self._unmount('http://', 'https://')
             self.base_url = 'http+docker://ssh'

@@ -184,7 +194,7 @@ class APIClient(
             if isinstance(tls, TLSConfig):
                 tls.configure_client(self)
             elif tls:
-                self._custom_adapter = SSLHTTPAdapter(
+                self._custom_adapter = requests.adapters.HTTPAdapter(
                     pool_connections=num_pools)
                 self.mount('https://', self._custom_adapter)
             self.base_url = base_url

@@ -199,28 +209,27 @@ class APIClient(
             self._version = version
             if not isinstance(self._version, str):
                 raise DockerException(
-                    'Version parameter must be a string or None. Found {}'.format(
-                        type(version).__name__
-                    )
+                    'Version parameter must be a string or None. '
+                    f'Found {type(version).__name__}'
                 )
             if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION):
                 raise InvalidVersion(
-                    'API versions below {} are no longer supported by this '
-                    'library.'.format(MINIMUM_DOCKER_API_VERSION)
+                    f'API versions below {MINIMUM_DOCKER_API_VERSION} are '
+                    f'no longer supported by this library.'
                 )

     def _retrieve_server_version(self):
         try:
             return self.version(api_version=False)["ApiVersion"]
-        except KeyError:
+        except KeyError as ke:
             raise DockerException(
                 'Invalid response from docker daemon: key "ApiVersion"'
                 ' is missing.'
-            )
+            ) from ke
         except Exception as e:
             raise DockerException(
                 f'Error while fetching server API version: {e}'
-            )
+            ) from e

     def _set_request_timeout(self, kwargs):
         """Prepare the kwargs for an HTTP request by inserting the timeout

@@ -248,19 +257,17 @@ class APIClient(
         for arg in args:
             if not isinstance(arg, str):
                 raise ValueError(
-                    'Expected a string but found {} ({}) '
-                    'instead'.format(arg, type(arg))
+                    f'Expected a string but found {arg} ({type(arg)}) instead'
                 )

         quote_f = partial(urllib.parse.quote, safe="/:")
         args = map(quote_f, args)

+        formatted_path = pathfmt.format(*args)
         if kwargs.get('versioned_api', True):
-            return '{}/v{}{}'.format(
-                self.base_url, self._version, pathfmt.format(*args)
-            )
+            return f'{self.base_url}/v{self._version}{formatted_path}'
         else:
-            return f'{self.base_url}{pathfmt.format(*args)}'
+            return f'{self.base_url}{formatted_path}'

     def _raise_for_status(self, response):
         """Raises stored :class:`APIError`, if one occurred."""

@@ -312,7 +319,16 @@ class APIClient(
         return self._create_websocket_connection(full_url)

     def _create_websocket_connection(self, url):
-        return websocket.create_connection(url)
+        try:
+            import websocket
+            return websocket.create_connection(url)
+        except ImportError as ie:
+            raise DockerException(
+                'The `websocket-client` library is required '
+                'for using websocket connections. '
+                'You can install the `docker` library '
+                'with the [websocket] extra to install it.'
+            ) from ie

     def _get_raw_response_socket(self, response):
         self._raise_for_status(response)

@@ -406,6 +422,10 @@ class APIClient(
             yield from response.iter_content(chunk_size, decode)

     def _read_from_socket(self, response, stream, tty=True, demux=False):
+        """Consume all data from the socket, close the response and return the
+        data. If stream=True, then a generator is returned instead and the
+        caller is responsible for closing the response.
+        """
         socket = self._get_raw_response_socket(response)

         gen = frames_iter(socket, tty)

@@ -420,8 +440,11 @@ class APIClient(
         if stream:
             return gen
         else:
-            # Wait for all the frames, concatenate them, and return the result
-            return consume_socket_output(gen, demux=demux)
+            try:
+                # Wait for all frames, concatenate them, and return the result
+                return consume_socket_output(gen, demux=demux)
+            finally:
+                response.close()

     def _disable_socket_timeout(self, socket):
         """ Depending on the combination of python version and whether we're

@@ -472,7 +495,7 @@ class APIClient(
             return self._multiplexed_response_stream_helper(res)
         else:
             return sep.join(
-                [x for x in self._multiplexed_buffer_helper(res)]
+                list(self._multiplexed_buffer_helper(res))
             )

     def _unmount(self, *args):
@@ -1,13 +1,14 @@
 from datetime import datetime

-from .. import errors
-from .. import utils
+from .. import errors, utils
 from ..constants import DEFAULT_DATA_CHUNK_SIZE
-from ..types import CancellableStream
-from ..types import ContainerConfig
-from ..types import EndpointConfig
-from ..types import HostConfig
-from ..types import NetworkingConfig
+from ..types import (
+    CancellableStream,
+    ContainerConfig,
+    EndpointConfig,
+    HostConfig,
+    NetworkingConfig,
+)


 class ContainerApiMixin:

@@ -112,7 +113,7 @@ class ContainerApiMixin:

     @utils.check_resource('container')
     def commit(self, container, repository=None, tag=None, message=None,
-               author=None, changes=None, conf=None):
+               author=None, pause=True, changes=None, conf=None):
         """
         Commit a container to an image. Similar to the ``docker commit``
         command.

@@ -123,6 +124,7 @@ class ContainerApiMixin:
             tag (str): The tag to push
             message (str): A commit message
             author (str): The name of the author
+            pause (bool): Whether to pause the container before committing
             changes (str): Dockerfile instructions to apply while committing
             conf (dict): The configuration for the container. See the
                 `Engine API documentation

@@ -139,6 +141,7 @@ class ContainerApiMixin:
             'tag': tag,
             'comment': message,
             'author': author,
+            'pause': pause,
             'changes': changes
         }
         u = self._url("/commit")

@@ -317,6 +320,11 @@ class ContainerApiMixin:
                     '/var/www': {
                         'bind': '/mnt/vol1',
                         'mode': 'ro',
+                    },
+                    '/autofs/user1': {
+                        'bind': '/mnt/vol3',
+                        'mode': 'rw',
+                        'propagation': 'shared'
                     }
                 })
             )

@@ -327,10 +335,11 @@ class ContainerApiMixin:
         .. code-block:: python

             container_id = client.api.create_container(
-                'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'],
+                'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2', '/mnt/vol3'],
                 host_config=client.api.create_host_config(binds=[
                     '/home/user1/:/mnt/vol2',
                     '/var/www:/mnt/vol1:ro',
+                    '/autofs/user1:/mnt/vol3:rw,shared',
                 ])
             )

@@ -678,7 +687,8 @@ class ContainerApiMixin:
             container (str): The container to diff

         Returns:
-            (str)
+            (list) A list of dictionaries containing the attributes `Path`
+                and `Kind`.

         Raises:
             :py:class:`docker.errors.APIError`

@@ -834,7 +844,7 @@ class ContainerApiMixin:
                 float (in fractional seconds)

         Returns:
-            (generator or str)
+            (generator of bytes or bytes)

         Raises:
             :py:class:`docker.errors.APIError`

@@ -861,7 +871,7 @@ class ContainerApiMixin:
             else:
                 raise errors.InvalidArgument(
                     'since value should be datetime or positive int/float,'
-                    'not {}'.format(type(since))
+                    f' not {type(since)}'
                 )

             if until is not None:

@@ -877,8 +887,8 @@ class ContainerApiMixin:
                 params['until'] = until
             else:
                 raise errors.InvalidArgument(
-                    'until value should be datetime or positive int/float, '
-                    'not {}'.format(type(until))
+                    f'until value should be datetime or positive int/float, '
+                    f'not {type(until)}'
                 )

         url = self._url("/containers/{0}/logs", container)

@@ -950,7 +960,7 @@ class ContainerApiMixin:
             return port_settings.get(private_port)

         for protocol in ['tcp', 'udp', 'sctp']:
-            h_ports = port_settings.get(private_port + '/' + protocol)
+            h_ports = port_settings.get(f"{private_port}/{protocol}")
             if h_ports:
                 break

@@ -966,7 +976,7 @@ class ContainerApiMixin:
             container (str): The container where the file(s) will be extracted
             path (str): Path inside the container where the file(s) will be
                 extracted. Must exist.
-            data (bytes): tar data to be extracted
+            data (bytes or stream): tar data to be extracted

         Returns:
             (bool): True if the call succeeds.

@@ -1126,7 +1136,7 @@ class ContainerApiMixin:
         self._raise_for_status(res)

     @utils.check_resource('container')
-    def stats(self, container, decode=None, stream=True):
+    def stats(self, container, decode=None, stream=True, one_shot=None):
         """
         Stream statistics for a specific container. Similar to the
         ``docker stats`` command.

@@ -1138,6 +1148,9 @@ class ContainerApiMixin:
                 False by default.
             stream (bool): If set to false, only the current stats will be
                 returned instead of a stream. True by default.
+            one_shot (bool): If set to true, Only get a single stat instead of
+                waiting for 2 cycles. Must be used with stream=false. False by
+                default.

         Raises:
             :py:class:`docker.errors.APIError`

@@ -1145,16 +1158,30 @@ class ContainerApiMixin:
         """
         url = self._url("/containers/{0}/stats", container)
+        params = {
+            'stream': stream
+        }
+        if one_shot is not None:
+            if utils.version_lt(self._version, '1.41'):
+                raise errors.InvalidVersion(
+                    'one_shot is not supported for API version < 1.41'
+                )
+            params['one-shot'] = one_shot
         if stream:
-            return self._stream_helper(self._get(url, stream=True),
-                                       decode=decode)
+            if one_shot:
+                raise errors.InvalidArgument(
+                    'one_shot is only available in conjunction with '
+                    'stream=False'
+                )
+            return self._stream_helper(
+                self._get(url, stream=True, params=params), decode=decode
+            )
         else:
             if decode:
                 raise errors.InvalidArgument(
                     "decode is only available in conjunction with stream=True"
                 )
-            return self._result(self._get(url, params={'stream': False}),
-                                json=True)
+            return self._result(self._get(url, params=params), json=True)

     @utils.check_resource('container')
     def stop(self, container, timeout=None):
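A sketch of how the new `one_shot` flag combines with `stream`, per the validation above (the container name is illustrative):

```python
import docker

api = docker.APIClient(base_url="unix://var/run/docker.sock")

# Take a single stats sample without the usual two-cycle wait
# (requires Docker API v1.41+; only valid together with stream=False).
snapshot = api.stats("my-container", stream=False, one_shot=True)
print(snapshot["cpu_stats"]["cpu_usage"]["total_usage"])
```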
@@ -1,5 +1,5 @@
-from .. import errors
-from .. import utils
+from .. import errors, utils
 from ..types import CancellableStream


 class ExecApiMixin:

@@ -125,9 +125,10 @@ class ExecApiMixin:
             detach (bool): If true, detach from the exec command.
                 Default: False
             tty (bool): Allocate a pseudo-TTY. Default: False
-            stream (bool): Stream response data. Default: False
+            stream (bool): Return response data progressively as an iterator
+                of strings, rather than a single string.
             socket (bool): Return the connection socket to allow custom
-                read/write operations.
+                read/write operations. Must be closed by the caller when done.
             demux (bool): Return stdout and stderr separately

         Returns:

@@ -161,7 +162,15 @@ class ExecApiMixin:
             stream=True
         )
         if detach:
-            return self._result(res)
+            try:
+                return self._result(res)
+            finally:
+                res.close()
         if socket:
             return self._get_raw_response_socket(res)
-        return self._read_from_socket(res, stream, tty=tty, demux=demux)
+
+        output = self._read_from_socket(res, stream, tty=tty, demux=demux)
+        if stream:
+            return CancellableStream(output, res)
+        else:
+            return output
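With the change above, a streaming exec call now returns a `CancellableStream`. A non-streaming sketch using `demux` (the container name is illustrative):

```python
import docker

api = docker.APIClient(base_url="unix://var/run/docker.sock")

# Create and start an exec instance, splitting stdout from stderr.
exec_id = api.exec_create("my-container", ["sh", "-c", "echo out; echo err >&2"])
stdout, stderr = api.exec_start(exec_id, demux=True)
print(stdout, stderr)
```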
@@ -47,7 +47,7 @@ class ImageApiMixin:
             image (str): The image to show history for

         Returns:
-            (str): The history of the image
+            (list): The history of the image

         Raises:
             :py:class:`docker.errors.APIError`
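A quick sketch of the corrected return type (the image name is illustrative):

```python
import docker

client = docker.from_env()

# history() returns a list of layer dictionaries, not a string.
for layer in client.api.history("alpine"):
    print(layer["Id"], layer["CreatedBy"][:60])
```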
@@ -1,7 +1,6 @@
-from ..errors import InvalidVersion
-from ..utils import check_resource, minimum_version
-from ..utils import version_lt
 from .. import utils
+from ..errors import InvalidVersion
+from ..utils import check_resource, minimum_version, version_lt


 class NetworkApiMixin:
@@ -1,7 +1,6 @@
 import base64

-from .. import errors
-from .. import utils
+from .. import errors, utils


 class SecretApiMixin:
@@ -7,9 +7,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec,

     def raise_version_error(param, min_version):
         raise errors.InvalidVersion(
-            '{} is not supported in API version < {}'.format(
-                param, min_version
-            )
+            f'{param} is not supported in API version < {min_version}'
         )

     if update_config is not None:

@@ -262,7 +260,7 @@ class ServiceApiMixin:
         return True

     @utils.minimum_version('1.24')
-    def services(self, filters=None):
+    def services(self, filters=None, status=None):
         """
         List services.


@@ -270,6 +268,8 @@ class ServiceApiMixin:
             filters (dict): Filters to process on the nodes list. Valid
                 filters: ``id``, ``name`` , ``label`` and ``mode``.
                 Default: ``None``.
+            status (bool): Include the service task count of running and
+                desired tasks. Default: ``None``.

         Returns:
             A list of dictionaries containing data about each service.

@@ -281,6 +281,12 @@ class ServiceApiMixin:
         params = {
             'filters': utils.convert_filters(filters) if filters else None
         }
+        if status is not None:
+            if utils.version_lt(self._version, '1.41'):
+                raise errors.InvalidVersion(
+                    'status is not supported in API version < 1.41'
+                )
+            params['status'] = status
         url = self._url('/services')
         return self._result(self._get(url, params=params), True)
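A sketch of the new `status` flag (the filter value and the `ServiceStatus` field name are assumptions based on the corresponding Engine API; requires Docker API v1.41+):

```python
import docker

api = docker.APIClient(base_url="unix://var/run/docker.sock")

# Include running/desired task counts for each matching service.
for svc in api.services(filters={"name": "web"}, status=True):
    print(svc["Spec"]["Name"], svc.get("ServiceStatus"))
```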
@@ -1,9 +1,8 @@
-import logging
 import http.client as http_client
+import logging

+from .. import errors, types, utils
 from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE
-from .. import errors
-from .. import types
-from .. import utils

 log = logging.getLogger(__name__)
@@ -1,5 +1,4 @@
-from .. import errors
-from .. import utils
+from .. import errors, utils


 class VolumeApiMixin:
@@ -2,8 +2,7 @@ import base64
 import json
 import logging

-from . import credentials
-from . import errors
+from . import credentials, errors
 from .utils import config

 INDEX_NAME = 'docker.io'

@@ -22,15 +21,15 @@ def resolve_repository_name(repo_name):
     index_name, remote_name = split_repo_name(repo_name)
     if index_name[0] == '-' or index_name[-1] == '-':
         raise errors.InvalidRepository(
-            'Invalid index name ({}). Cannot begin or end with a'
-            ' hyphen.'.format(index_name)
+            f'Invalid index name ({index_name}). '
+            'Cannot begin or end with a hyphen.'
         )
     return resolve_index_name(index_name), remote_name


 def resolve_index_name(index_name):
     index_name = convert_to_hostname(index_name)
-    if index_name == 'index.' + INDEX_NAME:
+    if index_name == f"index.{INDEX_NAME}":
         index_name = INDEX_NAME
     return index_name

@@ -99,9 +98,7 @@ class AuthConfig(dict):
         for registry, entry in entries.items():
             if not isinstance(entry, dict):
                 log.debug(
-                    'Config entry for key {} is not auth config'.format(
-                        registry
-                    )
+                    f'Config entry for key {registry} is not auth config'
                 )
                 # We sometimes fall back to parsing the whole config as if it
                 # was the auth config by itself, for legacy purposes. In that

@@ -109,17 +106,11 @@ class AuthConfig(dict):
                 # keys is not formatted properly.
                 if raise_on_error:
                     raise errors.InvalidConfigFile(
-                        'Invalid configuration for registry {}'.format(
-                            registry
-                        )
+                        f'Invalid configuration for registry {registry}'
                     )
                 return {}
             if 'identitytoken' in entry:
-                log.debug(
-                    'Found an IdentityToken entry for registry {}'.format(
-                        registry
-                    )
-                )
+                log.debug(f'Found an IdentityToken entry for registry {registry}')
                 conf[registry] = {
                     'IdentityToken': entry['identitytoken']
                 }

@@ -130,16 +121,15 @@ class AuthConfig(dict):
                     # a valid value in the auths config.
                     # https://github.com/docker/compose/issues/3265
                     log.debug(
-                        'Auth data for {} is absent. Client might be using a '
-                        'credentials store instead.'.format(registry)
+                        f'Auth data for {registry} is absent. '
+                        f'Client might be using a credentials store instead.'
                     )
                     conf[registry] = {}
                     continue

                 username, password = decode_auth(entry['auth'])
                 log.debug(
-                    'Found entry (registry={}, username={})'
-                    .format(repr(registry), repr(username))
+                    f'Found entry (registry={registry!r}, username={username!r})'
                 )

                 conf[registry] = {

@@ -277,7 +267,7 @@ class AuthConfig(dict):
             except credentials.StoreError as e:
                 raise errors.DockerException(
                     f'Credentials store error: {repr(e)}'
-                )
+                ) from e

     def _get_store_instance(self, name):
         if name not in self._stores:
@@ -1,5 +1,5 @@
 from .api.client import APIClient
-from .constants import (DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE)
+from .constants import DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS
 from .models.configs import ConfigCollection
 from .models.containers import ContainerCollection
 from .models.images import ImageCollection

@@ -71,8 +71,6 @@ class DockerClient:
         timeout (int): Default timeout for API calls, in seconds.
         max_pool_size (int): The maximum number of connections
             to save in the pool.
-        ssl_version (int): A valid `SSL version`_.
-        assert_hostname (bool): Verify the hostname of the server.
         environment (dict): The environment to read environment variables
             from. Default: the value of ``os.environ``
         credstore_env (dict): Override environment variables when calling
@@ -1,8 +1,9 @@
 import sys
+
 from .version import __version__

-DEFAULT_DOCKER_API_VERSION = '1.41'
-MINIMUM_DOCKER_API_VERSION = '1.21'
+DEFAULT_DOCKER_API_VERSION = '1.45'
+MINIMUM_DOCKER_API_VERSION = '1.24'
 DEFAULT_TIMEOUT_SECONDS = 60
 STREAM_HEADER_SIZE_BYTES = 8
 CONTAINER_LIMITS_KEYS = [
@@ -1,3 +1,2 @@
-# flake8: noqa
-from .context import Context
 from .api import ContextAPI
+from .context import Context
@@ -2,11 +2,14 @@ import json
 import os

 from docker import errors
-from docker.context.config import get_meta_dir
-from docker.context.config import METAFILE
-from docker.context.config import get_current_context_name
-from docker.context.config import write_context_name_to_docker_config
-from docker.context import Context
+
+from .config import (
+    METAFILE,
+    get_current_context_name,
+    get_meta_dir,
+    write_context_name_to_docker_config,
+)
+from .context import Context


 class ContextAPI:

@@ -113,8 +116,8 @@ class ContextAPI:
                     names.append(data["Name"])
             except Exception as e:
                 raise errors.ContextException(
-                    "Failed to load metafile {}: {}".format(
-                        filename, e))
+                    f"Failed to load metafile {filename}: {e}",
+                ) from e

         contexts = [cls.DEFAULT_CONTEXT]
         for name in names:
@@ -1,10 +1,9 @@
-import os
-import json
 import hashlib
+import json
+import os

 from docker import utils
-from docker.constants import IS_WINDOWS_PLATFORM
-from docker.constants import DEFAULT_UNIX_SOCKET
+from docker.constants import DEFAULT_UNIX_SOCKET, IS_WINDOWS_PLATFORM
 from docker.utils.config import find_config_file

 METAFILE = "meta.json"

@@ -77,5 +76,6 @@ def get_context_host(path=None, tls=False):
     host = utils.parse_host(path, IS_WINDOWS_PLATFORM, tls)
     if host == DEFAULT_UNIX_SOCKET:
         # remove http+ from default docker socket url
-        return host.strip("http+")
+        if host.startswith("http+"):
+            host = host[5:]
     return host
@@ -1,12 +1,16 @@
-import os
 import json
+import os
 from shutil import copyfile, rmtree
-from docker.tls import TLSConfig
+
 from docker.errors import ContextException
-from docker.context.config import get_meta_dir
-from docker.context.config import get_meta_file
-from docker.context.config import get_tls_dir
-from docker.context.config import get_context_host
+from docker.tls import TLSConfig
+
+from .config import (
+    get_context_host,
+    get_meta_dir,
+    get_meta_file,
+    get_tls_dir,
+)


 class Context:

@@ -42,8 +46,9 @@ class Context:
         for k, v in endpoints.items():
             if not isinstance(v, dict):
                 # unknown format
-                raise ContextException("""Unknown endpoint format for
-                    context {}: {}""".format(name, v))
+                raise ContextException(
+                    f"Unknown endpoint format for context {name}: {v}",
+                )

             self.endpoints[k] = v
             if k != "docker":

@@ -96,8 +101,9 @@ class Context:
                 metadata = json.load(f)
         except (OSError, KeyError, ValueError) as e:
             # unknown format
-            raise Exception("""Detected corrupted meta file for
-                context {} : {}""".format(name, e))
+            raise Exception(
+                f"Detected corrupted meta file for context {name} : {e}"
+            ) from e

         # for docker endpoints, set defaults for
         # Host and SkipTLSVerify fields
@@ -1,4 +1,8 @@
-# flake8: noqa
+from .constants import (
+    DEFAULT_LINUX_STORE,
+    DEFAULT_OSX_STORE,
+    DEFAULT_WIN32_STORE,
+    PROGRAM_PREFIX,
+)
+from .errors import CredentialsNotFound, StoreError
 from .store import Store
-from .errors import StoreError, CredentialsNotFound
-from .constants import *
@@ -13,13 +13,5 @@ class InitializationError(StoreError):
 def process_store_error(cpe, program):
     message = cpe.output.decode('utf-8')
     if 'credentials not found in native keychain' in message:
-        return CredentialsNotFound(
-            'No matching credentials in {}'.format(
-                program
-            )
-        )
-    return StoreError(
-        'Credentials store {} exited with "{}".'.format(
-            program, cpe.output.decode('utf-8').strip()
-        )
-    )
+        return CredentialsNotFound(f'No matching credentials in {program}')
+    return StoreError(f'Credentials store {program} exited with "{message}".')
@@ -2,9 +2,9 @@ import errno
 import json
 import shutil
 import subprocess
+import warnings

-from . import constants
-from . import errors
+from . import constants, errors
 from .utils import create_environment_dict

@@ -18,10 +18,9 @@ class Store:
         self.exe = shutil.which(self.program)
         self.environment = environment
         if self.exe is None:
-            raise errors.InitializationError(
-                '{} not installed or not available in PATH'.format(
-                    self.program
-                )
+            warnings.warn(
+                f'{self.program} not installed or not available in PATH',
+                stacklevel=1,
             )

     def get(self, server):

@@ -70,6 +69,10 @@ class Store:
         return json.loads(data.decode('utf-8'))

     def _execute(self, subcmd, data_input):
+        if self.exe is None:
+            raise errors.StoreError(
+                f'{self.program} not installed or not available in PATH'
+            )
         output = None
         env = create_environment_dict(self.environment)
         try:

@@ -77,18 +80,14 @@ class Store:
                 [self.exe, subcmd], input=data_input, env=env,
             )
         except subprocess.CalledProcessError as e:
-            raise errors.process_store_error(e, self.program)
+            raise errors.process_store_error(e, self.program) from e
         except OSError as e:
             if e.errno == errno.ENOENT:
                 raise errors.StoreError(
-                    '{} not installed or not available in PATH'.format(
-                        self.program
-                    )
-                )
+                    f'{self.program} not installed or not available in PATH'
+                ) from e
             else:
                 raise errors.StoreError(
-                    'Unexpected OS error "{}", errno={}'.format(
-                        e.strerror, e.errno
-                    )
-                )
+                    f'Unexpected OS error "{e.strerror}", errno={e.errno}'
+                ) from e
         return output
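Under the new behavior, constructing a `Store` for a missing helper only warns; the error surfaces when a call is attempted. A sketch (the helper name is illustrative and must exist on `PATH` for the call to succeed):

```python
from docker import credentials

store = credentials.Store("docker-credential-desktop")

# Raises errors.StoreError here (not in the constructor) if the helper is absent.
creds = store.get("https://index.docker.io/v1/")
print(creds["Username"])
```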
@@ -27,7 +27,7 @@ def create_api_error_from_http_exception(e):
     try:
         explanation = response.json()['message']
     except ValueError:
-        explanation = (response.content or '').strip()
+        explanation = (response.text or '').strip()
     cls = APIError
     if response.status_code == 404:
         explanation_msg = (explanation or '').lower()

@@ -54,14 +54,16 @@ class APIError(requests.exceptions.HTTPError, DockerException):
         message = super().__str__()

         if self.is_client_error():
-            message = '{} Client Error for {}: {}'.format(
-                self.response.status_code, self.response.url,
-                self.response.reason)
+            message = (
+                f'{self.response.status_code} Client Error for '
+                f'{self.response.url}: {self.response.reason}'
+            )

         elif self.is_server_error():
-            message = '{} Server Error for {}: {}'.format(
-                self.response.status_code, self.response.url,
-                self.response.reason)
+            message = (
+                f'{self.response.status_code} Server Error for '
+                f'{self.response.url}: {self.response.reason}'
+            )

         if self.explanation:
             message = f'{message} ("{self.explanation}")'

@@ -142,10 +144,10 @@ class ContainerError(DockerException):
         self.stderr = stderr

         err = f": {stderr}" if stderr is not None else ""
-        msg = ("Command '{}' in image '{}' returned non-zero exit "
-               "status {}{}").format(command, image, exit_status, err)
-
-        super().__init__(msg)
+        super().__init__(
+            f"Command '{command}' in image '{image}' "
+            f"returned non-zero exit status {exit_status}{err}"
+        )


 class StreamParseError(RuntimeError):
@@ -1,5 +1,5 @@
 from ..api import APIClient
-from .resource import Model, Collection
+from .resource import Collection, Model


 class Config(Model):

@@ -30,6 +30,7 @@ class ConfigCollection(Collection):

     def create(self, **kwargs):
         obj = self.client.api.create_config(**kwargs)
+        obj.setdefault("Spec", {})["Name"] = kwargs.get("name")
         return self.prepare_model(obj)
     create.__doc__ = APIClient.create_config.__doc__
@@ -5,10 +5,13 @@ from collections import namedtuple
 from ..api import APIClient
 from ..constants import DEFAULT_DATA_CHUNK_SIZE
 from ..errors import (
-    ContainerError, DockerException, ImageNotFound,
-    NotFound, create_unexpected_kwargs_error
+    ContainerError,
+    DockerException,
+    ImageNotFound,
+    NotFound,
+    create_unexpected_kwargs_error,
 )
-from ..types import HostConfig
+from ..types import HostConfig, NetworkingConfig
 from ..utils import version_gte
 from .images import Image
 from .resource import Collection, Model

@@ -21,6 +24,7 @@ class Container(Model):
     query the Docker daemon for the current properties, causing
     :py:attr:`attrs` to be refreshed.
     """
+
     @property
     def name(self):
         """

@@ -47,11 +51,11 @@ class Container(Model):
         try:
             result = self.attrs['Config'].get('Labels')
             return result or {}
-        except KeyError:
+        except KeyError as ke:
             raise DockerException(
                 'Label data is not available for sparse objects. Call reload()'
                 ' to retrieve all information'
-            )
+            ) from ke

     @property
     def status(self):

@@ -62,6 +66,15 @@ class Container(Model):
             return self.attrs['State']['Status']
         return self.attrs['State']

+    @property
+    def health(self):
+        """
+        The healthcheck status of the container.
+
+        For example, ``healthy``, or ``unhealthy``.
+        """
+        return self.attrs.get('State', {}).get('Health', {}).get('Status', 'unknown')
+
     @property
     def ports(self):
         """
|
|||
tag (str): The tag to push
|
||||
message (str): A commit message
|
||||
author (str): The name of the author
|
||||
pause (bool): Whether to pause the container before committing
|
||||
changes (str): Dockerfile instructions to apply while committing
|
||||
conf (dict): The configuration for the container. See the
|
||||
`Engine API documentation
|
||||
|
@ -141,7 +155,8 @@ class Container(Model):
|
|||
Inspect changes on a container's filesystem.
|
||||
|
||||
Returns:
|
||||
(str)
|
||||
(list) A list of dictionaries containing the attributes `Path`
|
||||
and `Kind`.
|
||||
|
||||
Raises:
|
||||
:py:class:`docker.errors.APIError`
|
||||
|
@ -166,7 +181,8 @@ class Container(Model):
|
|||
user (str): User to execute command as. Default: root
|
||||
detach (bool): If true, detach from the exec command.
|
||||
Default: False
|
||||
stream (bool): Stream response data. Default: False
|
||||
stream (bool): Stream response data. Ignored if ``detach`` is true.
|
||||
Default: False
|
||||
socket (bool): Return the connection socket to allow custom
|
||||
read/write operations. Default: False
|
||||
environment (dict or list): A dictionary or a list of strings in
|
||||
|
@ -298,7 +314,7 @@ class Container(Model):
|
|||
float (in nanoseconds)
|
||||
|
||||
Returns:
|
||||
(generator or str): Logs from the container.
|
||||
(generator of bytes or bytes): Logs from the container.
|
||||
|
||||
Raises:
|
||||
:py:class:`docker.errors.APIError`
|
||||
|
@ -324,7 +340,7 @@ class Container(Model):
|
|||
Args:
|
||||
path (str): Path inside the container where the file(s) will be
|
||||
extracted. Must exist.
|
||||
data (bytes): tar data to be extracted
|
||||
data (bytes or stream): tar data to be extracted
|
||||
|
||||
Returns:
|
||||
(bool): True if the call succeeds.
|
||||
|
@ -679,6 +695,14 @@ class ContainerCollection(Collection):
|
|||
This mode is incompatible with ``ports``.
|
||||
|
||||
Incompatible with ``network``.
|
||||
networking_config (Dict[str, EndpointConfig]):
|
||||
Dictionary of EndpointConfig objects for each container network.
|
||||
The key is the name of the network.
|
||||
Defaults to ``None``.
|
||||
|
||||
Used in conjuction with ``network``.
|
||||
|
||||
Incompatible with ``network_mode``.
|
||||
oom_kill_disable (bool): Whether to disable OOM killer.
|
||||
oom_score_adj (int): An integer value containing the score given
|
||||
to the container in order to tune OOM killer preferences.
|
||||
|
@ -843,6 +867,12 @@ class ContainerCollection(Collection):
|
|||
'together.'
|
||||
)
|
||||
|
||||
if kwargs.get('networking_config') and not kwargs.get('network'):
|
||||
raise RuntimeError(
|
||||
'The option "networking_config" can not be used '
|
||||
'without "network".'
|
||||
)
|
||||
|
||||
try:
|
||||
container = self.create(image=image, command=command,
|
||||
detach=detach, **kwargs)
|
||||
|
@ -877,9 +907,9 @@ class ContainerCollection(Collection):
|
|||
container, exit_status, command, image, out
|
||||
)
|
||||
|
||||
return out if stream or out is None else b''.join(
|
||||
[line for line in out]
|
||||
)
|
||||
if stream or out is None:
|
||||
return out
|
||||
return b''.join(out)
|
||||
|
||||
def create(self, image, command=None, **kwargs):
|
||||
"""
|
||||
|
@ -995,6 +1025,7 @@ class ContainerCollection(Collection):
|
|||
|
||||
def prune(self, filters=None):
|
||||
return self.client.api.prune_containers(filters=filters)
|
||||
|
||||
prune.__doc__ = APIClient.prune_containers.__doc__
|
||||
|
||||
|
||||
|
@ -1113,8 +1144,17 @@ def _create_container_args(kwargs):
|
|||
host_config_kwargs['binds'] = volumes
|
||||
|
||||
network = kwargs.pop('network', None)
|
||||
networking_config = kwargs.pop('networking_config', None)
|
||||
if network:
|
||||
create_kwargs['networking_config'] = {network: None}
|
||||
if networking_config:
|
||||
# Sanity check: check if the network is defined in the
|
||||
# networking config dict, otherwise switch to None
|
||||
if network not in networking_config:
|
||||
networking_config = None
|
||||
|
||||
create_kwargs['networking_config'] = NetworkingConfig(
|
||||
networking_config
|
||||
) if networking_config else {network: None}
|
||||
host_config_kwargs['network_mode'] = network
|
||||
|
||||
# All kwargs should have been consumed by this point, so raise
|
||||
|
@ -1147,8 +1187,10 @@ def _host_volume_from_bind(bind):
|
|||
bits = rest.split(':', 1)
|
||||
if len(bits) == 1 or bits[1] in ('ro', 'rw'):
|
||||
return drive + bits[0]
|
||||
elif bits[1].endswith(':ro') or bits[1].endswith(':rw'):
|
||||
return bits[1][:-3]
|
||||
else:
|
||||
return bits[1].rstrip(':ro').rstrip(':rw')
|
||||
return bits[1]
|
||||
|
||||
|
||||
ExecResult = namedtuple('ExecResult', 'exit_code,output')
|
||||
|
|
|
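The `networking_config` handling above pairs with `network`: the dict must be keyed by the network named in `network`, otherwise it is dropped and a plain `{network: None}` config is sent. A hedged sketch of the intended call shape (the network name, alias, and address are invented for illustration):

```python
import docker
from docker.types import EndpointConfig

client = docker.from_env()

# EndpointConfig's first argument is the API version to validate against
endpoint = EndpointConfig('1.44', aliases=['web'], ipv4_address='10.0.0.10')

container = client.containers.create(
    'nginx:alpine',
    network='appnet',                         # also becomes the network_mode
    networking_config={'appnet': endpoint},   # keyed by the same network name
)
```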
@ -15,10 +15,8 @@ class Image(Model):
An image on the server.
"""
def __repr__(self):
return "<{}: '{}'>".format(
self.__class__.__name__,
"', '".join(self.tags),
)
tag_str = "', '".join(self.tags)
return f"<{self.__class__.__name__}: '{tag_str}'>"

@property
def labels(self):

@ -53,7 +51,7 @@ class Image(Model):
Show the history of an image.

Returns:
(str): The history of the image.
(list): The history of the image.

Raises:
:py:class:`docker.errors.APIError`

@ -409,8 +407,8 @@ class ImageCollection(Collection):
if match:
image_id = match.group(2)
images.append(image_id)
if 'error' in chunk:
raise ImageLoadError(chunk['error'])
if 'errorDetail' in chunk:
raise ImageLoadError(chunk['errorDetail']['message'])

return [self.get(i) for i in images]


@ -458,7 +456,8 @@ class ImageCollection(Collection):
if 'stream' in kwargs:
warnings.warn(
'`stream` is not a valid parameter for this method'
' and will be overridden'
' and will be overridden',
stacklevel=1,
)
del kwargs['stream']


@ -471,9 +470,8 @@ class ImageCollection(Collection):
# to be pulled.
pass
if not all_tags:
return self.get('{0}{2}{1}'.format(
repository, tag, '@' if tag.startswith('sha256:') else ':'
))
sep = '@' if tag.startswith('sha256:') else ':'
return self.get(f'{repository}{sep}{tag}')
return self.list(repository)

def push(self, repository, tag=None, **kwargs):
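The rewritten `pull()` return path simply picks `@` or `:` as the separator depending on whether the tag is a digest. A sketch (the digest is elided, not a real one):

```python
import docker

client = docker.from_env()

# tag form -> resolves via client.images.get('busybox:latest')
image = client.images.pull('busybox', tag='latest')

# digest form -> would resolve via client.images.get('busybox@sha256:...')
# image = client.images.pull('busybox', tag='sha256:<digest>')
```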
@ -1,7 +1,7 @@
from ..api import APIClient
from ..utils import version_gte
from .containers import Container
from .resource import Model, Collection
from .resource import Collection, Model


class Network(Model):

@ -184,7 +184,7 @@ class NetworkCollection(Collection):

def list(self, *args, **kwargs):
"""
List networks. Similar to the ``docker networks ls`` command.
List networks. Similar to the ``docker network ls`` command.

Args:
names (:py:class:`list`): List of names to filter by.
@ -1,4 +1,4 @@
from .resource import Model, Collection
from .resource import Collection, Model


class Node(Model):
@ -187,7 +187,7 @@ class PluginCollection(Collection):
"""
privileges = self.client.api.plugin_privileges(remote_name)
it = self.client.api.pull_plugin(remote_name, privileges, local_name)
for data in it:
for _data in it:
pass
return self.get(local_name or remote_name)
@ -64,9 +64,10 @@ class Collection:

def __call__(self, *args, **kwargs):
raise TypeError(
"'{}' object is not callable. You might be trying to use the old "
"(pre-2.0) API - use docker.APIClient if so."
.format(self.__class__.__name__))
f"'{self.__class__.__name__}' object is not callable. "
"You might be trying to use the old (pre-2.0) API - "
"use docker.APIClient if so."
)

def list(self):
raise NotImplementedError

@ -88,5 +89,4 @@ class Collection:
elif isinstance(attrs, dict):
return self.model(attrs=attrs, client=self.client, collection=self)
else:
raise Exception("Can't create %s from %s" %
(self.model.__name__, attrs))
raise Exception(f"Can't create {self.model.__name__} from {attrs}")
@ -1,5 +1,5 @@
from ..api import APIClient
from .resource import Model, Collection
from .resource import Collection, Model


class Secret(Model):
@ -1,7 +1,9 @@
import copy
from docker.errors import create_unexpected_kwargs_error, InvalidArgument
from docker.types import TaskTemplate, ContainerSpec, Placement, ServiceMode
from .resource import Model, Collection

from docker.errors import InvalidArgument, create_unexpected_kwargs_error
from docker.types import ContainerSpec, Placement, ServiceMode, TaskTemplate

from .resource import Collection, Model


class Service(Model):

@ -266,6 +268,8 @@ class ServiceCollection(Collection):
filters (dict): Filters to process on the nodes list. Valid
filters: ``id``, ``name``, ``label`` and ``mode``.
Default: ``None``.
status (bool): Include the service task count of running and
desired tasks. Default: ``None``.

Returns:
list of :py:class:`Service`: The services.
@ -1,5 +1,6 @@
from docker.api import APIClient
from docker.errors import APIError

from .resource import Model
@ -1,5 +1,5 @@
from ..api import APIClient
from .resource import Model, Collection
from .resource import Collection, Model


class Volume(Model):
@ -1,8 +1,6 @@
import os
import ssl

from . import errors
from .transport import SSLHTTPAdapter


class TLSConfig:

@ -15,35 +13,18 @@ class TLSConfig:
verify (bool or str): This can be a bool or a path to a CA cert
file to verify against. If ``True``, verify using ca_cert;
if ``False`` or not specified, do not verify.
ssl_version (int): A valid `SSL version`_.
assert_hostname (bool): Verify the hostname of the server.

.. _`SSL version`:
https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_TLSv1
"""
cert = None
ca_cert = None
verify = None
ssl_version = None

def __init__(self, client_cert=None, ca_cert=None, verify=None,
ssl_version=None, assert_hostname=None,
assert_fingerprint=None):
def __init__(self, client_cert=None, ca_cert=None, verify=None):
# Argument compatibility/mapping with
# https://docs.docker.com/engine/articles/https/
# This diverges from the Docker CLI in that users can specify 'tls'
# here, but also disable any public/default CA pool verification by
# leaving verify=False

self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint

# If the user provides an SSL version, we should use their preference
if ssl_version:
self.ssl_version = ssl_version
else:
self.ssl_version = ssl.PROTOCOL_TLS_CLIENT

# "client_cert" must have both or neither cert/key files. In
# either case, Alert the user when both are expected, but any are
# missing.

@ -55,7 +36,7 @@ class TLSConfig:
raise errors.TLSParameterError(
'client_cert must be a tuple of'
' (client certificate, key file)'
)
) from None

if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or
not os.path.isfile(tls_key)):

@ -77,8 +58,6 @@ class TLSConfig:
"""
Configure a client with these TLS options.
"""
client.ssl_version = self.ssl_version

if self.verify and self.ca_cert:
client.verify = self.ca_cert
else:

@ -86,9 +65,3 @@ class TLSConfig:

if self.cert:
client.cert = self.cert

client.mount('https://', SSLHTTPAdapter(
ssl_version=self.ssl_version,
assert_hostname=self.assert_hostname,
assert_fingerprint=self.assert_fingerprint,
))
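After this change `TLSConfig` accepts only the three remaining arguments; `ssl_version` and `assert_hostname` now come from Python's own TLS defaults. A sketch with made-up certificate paths and host:

```python
import docker
from docker.tls import TLSConfig

tls_config = TLSConfig(
    client_cert=('/certs/cert.pem', '/certs/key.pem'),  # both or neither
    ca_cert='/certs/ca.pem',
    verify=True,
)
client = docker.DockerClient(
    base_url='tcp://docker.example.com:2376', tls=tls_config
)
```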
@ -1,6 +1,5 @@
# flake8: noqa
from .unixconn import UnixHTTPAdapter
from .ssladapter import SSLHTTPAdapter

try:
from .npipeconn import NpipeHTTPAdapter
from .npipesocket import NpipeSocket
@ -6,3 +6,8 @@ class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
super().close()
if hasattr(self, 'pools'):
self.pools.clear()

# Fix for requests 2.32.2+:
# https://github.com/psf/requests/commit/c98e4d133ef29c46a9b68cd783087218a8075e05
def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
return self.get_connection(request.url, proxies)
@ -1,21 +1,17 @@
import queue

import requests.adapters

from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
from .npipesocket import NpipeSocket

import http.client as httplib

try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3
import urllib3.connection

from .. import constants
from .basehttpadapter import BaseHTTPAdapter
from .npipesocket import NpipeSocket

RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer


class NpipeHTTPConnection(httplib.HTTPConnection):
class NpipeHTTPConnection(urllib3.connection.HTTPConnection):
def __init__(self, npipe_path, timeout=60):
super().__init__(
'localhost', timeout=timeout

@ -50,9 +46,8 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
conn = None
try:
conn = self.pool.get(block=self.block, timeout=timeout)

except AttributeError: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
except AttributeError as ae: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae

except queue.Empty:
if self.block:

@ -60,7 +55,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
self,
"Pool reached maximum size and no more "
"connections are allowed."
)
) from None
# Oh well, we'll create a new connection then

return conn or self._new_conn()
@ -1,7 +1,10 @@
import functools
import time
import io
import time

import pywintypes
import win32api
import win32event
import win32file
import win32pipe

@ -54,7 +57,9 @@ class NpipeSocket:
0,
None,
win32file.OPEN_EXISTING,
cSECURITY_ANONYMOUS | cSECURITY_SQOS_PRESENT,
(cSECURITY_ANONYMOUS
| cSECURITY_SQOS_PRESENT
| win32file.FILE_FLAG_OVERLAPPED),
0
)
except win32pipe.error as e:

@ -131,22 +136,37 @@ class NpipeSocket:
if not isinstance(buf, memoryview):
readbuf = memoryview(buf)

event = win32event.CreateEvent(None, True, True, None)
try:
overlapped = pywintypes.OVERLAPPED()
overlapped.hEvent = event
err, data = win32file.ReadFile(
self._handle,
readbuf[:nbytes] if nbytes else readbuf
readbuf[:nbytes] if nbytes else readbuf,
overlapped
)
return len(data)

def _recv_into_py2(self, buf, nbytes):
err, data = win32file.ReadFile(self._handle, nbytes or len(buf))
n = len(data)
buf[:n] = data
return n
wait_result = win32event.WaitForSingleObject(event, self._timeout)
if wait_result == win32event.WAIT_TIMEOUT:
win32file.CancelIo(self._handle)
raise TimeoutError
return win32file.GetOverlappedResult(self._handle, overlapped, 0)
finally:
win32api.CloseHandle(event)

@check_closed
def send(self, string, flags=0):
err, nbytes = win32file.WriteFile(self._handle, string)
return nbytes
event = win32event.CreateEvent(None, True, True, None)
try:
overlapped = pywintypes.OVERLAPPED()
overlapped.hEvent = event
win32file.WriteFile(self._handle, string, overlapped)
wait_result = win32event.WaitForSingleObject(event, self._timeout)
if wait_result == win32event.WAIT_TIMEOUT:
win32file.CancelIo(self._handle)
raise TimeoutError
return win32file.GetOverlappedResult(self._handle, overlapped, 0)
finally:
win32api.CloseHandle(event)

@check_closed
def sendall(self, string, flags=0):

@ -165,15 +185,12 @@ class NpipeSocket:
def settimeout(self, value):
if value is None:
# Blocking mode
self._timeout = win32pipe.NMPWAIT_WAIT_FOREVER
self._timeout = win32event.INFINITE
elif not isinstance(value, (float, int)) or value < 0:
raise ValueError('Timeout value out of range')
elif value == 0:
# Non-blocking mode
self._timeout = win32pipe.NMPWAIT_NO_WAIT
else:
# Timeout mode - Value converted to milliseconds
self._timeout = value * 1000
self._timeout = int(value * 1000)

def gettimeout(self):
return self._timeout
@ -1,22 +1,18 @@
import paramiko
import queue
import urllib.parse
import requests.adapters
import logging
import os
import queue
import signal
import socket
import subprocess
import urllib.parse

from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants

import http.client as httplib

try:
import requests.packages.urllib3 as urllib3
except ImportError:
import paramiko
import requests.adapters
import urllib3
import urllib3.connection

from .. import constants
from .basehttpadapter import BaseHTTPAdapter

RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer

@ -99,7 +95,7 @@ class SSHSocket(socket.socket):
self.proc.terminate()


class SSHConnection(httplib.HTTPConnection):
class SSHConnection(urllib3.connection.HTTPConnection):
def __init__(self, ssh_transport=None, timeout=60, host=None):
super().__init__(
'localhost', timeout=timeout

@ -145,8 +141,8 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
try:
conn = self.pool.get(block=self.block, timeout=timeout)

except AttributeError: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
except AttributeError as ae: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae

except queue.Empty:
if self.block:

@ -154,7 +150,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
self,
"Pool reached maximum size and no more "
"connections are allowed."
)
) from None
# Oh well, we'll create a new connection then

return conn or self._new_conn()
@ -1,65 +0,0 @@
""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from packaging.version import Version
from requests.adapters import HTTPAdapter

from docker.transport.basehttpadapter import BaseHTTPAdapter

try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3


PoolManager = urllib3.poolmanager.PoolManager


class SSLHTTPAdapter(BaseHTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''

__attrs__ = HTTPAdapter.__attrs__ + ['assert_fingerprint',
'assert_hostname',
'ssl_version']

def __init__(self, ssl_version=None, assert_hostname=None,
assert_fingerprint=None, **kwargs):
self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
super().__init__(**kwargs)

def init_poolmanager(self, connections, maxsize, block=False):
kwargs = {
'num_pools': connections,
'maxsize': maxsize,
'block': block,
'assert_hostname': self.assert_hostname,
'assert_fingerprint': self.assert_fingerprint,
}
if self.ssl_version and self.can_override_ssl_version():
kwargs['ssl_version'] = self.ssl_version

self.poolmanager = PoolManager(**kwargs)

def get_connection(self, *args, **kwargs):
"""
Ensure assert_hostname is set correctly on our pool

We already take care of a normal poolmanager via init_poolmanager

But we still need to take care of when there is a proxy poolmanager
"""
conn = super().get_connection(*args, **kwargs)
if conn.assert_hostname != self.assert_hostname:
conn.assert_hostname = self.assert_hostname
return conn

def can_override_ssl_version(self):
urllib_ver = urllib3.__version__.split('-')[0]
if urllib_ver is None:
return False
if urllib_ver == 'dev':
return True
return Version(urllib_ver) > Version('1.5')
@ -1,20 +1,16 @@
import requests.adapters
import socket
import http.client as httplib

from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants

try:
import requests.packages.urllib3 as urllib3
except ImportError:
import requests.adapters
import urllib3
import urllib3.connection

from .. import constants
from .basehttpadapter import BaseHTTPAdapter

RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer


class UnixHTTPConnection(httplib.HTTPConnection):
class UnixHTTPConnection(urllib3.connection.HTTPConnection):

def __init__(self, base_url, unix_socket, timeout=60):
super().__init__(

@ -30,12 +26,6 @@ class UnixHTTPConnection(httplib.HTTPConnection):
sock.connect(self.unix_socket)
self.sock = sock

def putheader(self, header, *values):
super().putheader(header, *values)

def response_class(self, sock, *args, **kwargs):
return httplib.HTTPResponse(sock, *args, **kwargs)


class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path, timeout=60, maxsize=10):

@ -64,7 +54,7 @@ class UnixHTTPAdapter(BaseHTTPAdapter):
max_pool_size=constants.DEFAULT_MAX_POOL_SIZE):
socket_path = socket_url.replace('http+unix://', '')
if not socket_path.startswith('/'):
socket_path = '/' + socket_path
socket_path = f"/{socket_path}"
self.socket_path = socket_path
self.timeout = timeout
self.max_pool_size = max_pool_size
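With `UnixHTTPConnection` now subclassing urllib3's `HTTPConnection`, the `putheader`/`response_class` shims become unnecessary. Usage is unchanged; a quick sketch against the stock engine socket:

```python
import docker

# the adapter translates http+unix:// URLs into connections over the
# local socket; this path is the default Docker Engine socket
client = docker.DockerClient(base_url='unix:///var/run/docker.sock')
print(client.version()['ApiVersion'])
```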
@ -1,14 +1,24 @@
# flake8: noqa
from .containers import (
ContainerConfig, HostConfig, LogConfig, Ulimit, DeviceRequest
)
from .containers import ContainerConfig, DeviceRequest, HostConfig, LogConfig, Ulimit
from .daemon import CancellableStream
from .healthcheck import Healthcheck
from .networks import EndpointConfig, IPAMConfig, IPAMPool, NetworkingConfig
from .services import (
ConfigReference, ContainerSpec, DNSConfig, DriverConfig, EndpointSpec,
Mount, Placement, PlacementPreference, Privileges, Resources,
RestartPolicy, RollbackConfig, SecretReference, ServiceMode, TaskTemplate,
UpdateConfig, NetworkAttachmentConfig
ConfigReference,
ContainerSpec,
DNSConfig,
DriverConfig,
EndpointSpec,
Mount,
NetworkAttachmentConfig,
Placement,
PlacementPreference,
Privileges,
Resources,
RestartPolicy,
RollbackConfig,
SecretReference,
ServiceMode,
TaskTemplate,
UpdateConfig,
)
from .swarm import SwarmSpec, SwarmExternalCA
from .swarm import SwarmExternalCA, SwarmSpec
@ -1,8 +1,16 @@
from .. import errors
from ..utils.utils import (
convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds,
format_environment, format_extra_hosts, normalize_links, parse_bytes,
parse_devices, split_command, version_gte, version_lt,
convert_port_bindings,
convert_tmpfs_mounts,
convert_volume_binds,
format_environment,
format_extra_hosts,
normalize_links,
parse_bytes,
parse_devices,
split_command,
version_gte,
version_lt,
)
from .base import DictType
from .healthcheck import Healthcheck

@ -48,8 +56,11 @@ class LogConfig(DictType):
>>> container = client.create_container('busybox', 'true',
... host_config=hc)
>>> client.inspect_container(container)['HostConfig']['LogConfig']
{'Type': 'json-file', 'Config': {'labels': 'production_status,geo', 'max-size': '1g'}}
""" # noqa: E501
{
'Type': 'json-file',
'Config': {'labels': 'production_status,geo', 'max-size': '1g'}
}
"""
types = LogConfigTypesEnum

def __init__(self, **kwargs):

@ -652,25 +663,25 @@ class HostConfig(dict):


def host_config_type_error(param, param_value, expected):
error_msg = 'Invalid type for {0} param: expected {1} but found {2}'
return TypeError(error_msg.format(param, expected, type(param_value)))
return TypeError(
f'Invalid type for {param} param: expected {expected} '
f'but found {type(param_value)}'
)


def host_config_version_error(param, version, less_than=True):
operator = '<' if less_than else '>'
error_msg = '{0} param is not supported in API versions {1} {2}'
return errors.InvalidVersion(error_msg.format(param, operator, version))

return errors.InvalidVersion(
f'{param} param is not supported in API versions {operator} {version}',
)

def host_config_value_error(param, param_value):
error_msg = 'Invalid value for {0} param: {1}'
return ValueError(error_msg.format(param, param_value))
return ValueError(f'Invalid value for {param} param: {param_value}')


def host_config_incompatible_error(param, param_value, incompatible_param):
error_msg = '\"{1}\" {0} is incompatible with {2}'
return errors.InvalidArgument(
error_msg.format(param, param_value, incompatible_param)
f'\"{param_value}\" {param} is incompatible with {incompatible_param}'
)
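The reflowed `LogConfig` docstring above describes the same structure as before; a sketch of building one, with values taken from that docstring:

```python
from docker.types import LogConfig

lc = LogConfig(
    type=LogConfig.types.JSON,
    config={'max-size': '1g', 'labels': 'production_status,geo'},
)
# lc can then be passed as the log_config argument when creating a host config
```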
@ -1,8 +1,5 @@
import socket

try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3

from ..errors import DockerException

@ -31,9 +28,9 @@ class CancellableStream:
try:
return next(self._stream)
except urllib3.exceptions.ProtocolError:
raise StopIteration
raise StopIteration from None
except OSError:
raise StopIteration
raise StopIteration from None

next = __next__
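`CancellableStream` now chains `from None` when converting transport errors into `StopIteration`. A sketch of typical use via the events API:

```python
import docker

client = docker.from_env()
events = client.events(decode=True)  # returns a CancellableStream
for event in events:
    print(event['Type'], event.get('Action'))
    events.close()  # cancel after the first event; iteration then stops
```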
@ -1,8 +1,12 @@
from .. import errors
from ..constants import IS_WINDOWS_PLATFORM
from ..utils import (
check_resource, format_environment, format_extra_hosts, parse_bytes,
split_command, convert_service_networks,
check_resource,
convert_service_networks,
format_environment,
format_extra_hosts,
parse_bytes,
split_command,
)


@ -238,6 +242,7 @@ class Mount(dict):
for the ``volume`` type.
driver_config (DriverConfig): Volume driver configuration. Only valid
for the ``volume`` type.
subpath (str): Path inside a volume to mount instead of the volume root.
tmpfs_size (int or string): The size for the tmpfs mount in bytes.
tmpfs_mode (int): The permission mode for the tmpfs mount.
"""

@ -245,7 +250,7 @@ class Mount(dict):
def __init__(self, target, source, type='volume', read_only=False,
consistency=None, propagation=None, no_copy=False,
labels=None, driver_config=None, tmpfs_size=None,
tmpfs_mode=None):
tmpfs_mode=None, subpath=None):
self['Target'] = target
self['Source'] = source
if type not in ('bind', 'volume', 'tmpfs', 'npipe'):

@ -263,7 +268,7 @@ class Mount(dict):
self['BindOptions'] = {
'Propagation': propagation
}
if any([labels, driver_config, no_copy, tmpfs_size, tmpfs_mode]):
if any([labels, driver_config, no_copy, tmpfs_size, tmpfs_mode, subpath]):
raise errors.InvalidArgument(
'Incompatible options have been provided for the bind '
'type mount.'

@ -276,6 +281,8 @@ class Mount(dict):
volume_opts['Labels'] = labels
if driver_config:
volume_opts['DriverConfig'] = driver_config
if subpath:
volume_opts['Subpath'] = subpath
if volume_opts:
self['VolumeOptions'] = volume_opts
if any([propagation, tmpfs_size, tmpfs_mode]):

@ -371,7 +378,7 @@ def _convert_generic_resources_dict(generic_resources):
if not isinstance(generic_resources, dict):
raise errors.InvalidArgument(
'generic_resources must be a dict or a list '
' (found {})'.format(type(generic_resources))
f'(found {type(generic_resources)})'
)
resources = []
for kind, value in generic_resources.items():

@ -381,9 +388,9 @@ def _convert_generic_resources_dict(generic_resources):
elif isinstance(value, str):
resource_type = 'NamedResourceSpec'
else:
kv = {kind: value}
raise errors.InvalidArgument(
'Unsupported generic resource reservation '
'type: {}'.format({kind: value})
f'Unsupported generic resource reservation type: {kv}'
)
resources.append({
resource_type: {'Kind': kind, 'Value': value}

@ -764,8 +771,8 @@ class PlacementPreference(dict):
def __init__(self, strategy, descriptor):
if strategy != 'spread':
raise errors.InvalidArgument(
'PlacementPreference strategy value is invalid ({}):'
' must be "spread".'.format(strategy)
f'PlacementPreference strategy value is invalid ({strategy}): '
'must be "spread".'
)
self['Spread'] = {'SpreadDescriptor': descriptor}
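The new `subpath` option rides along in `VolumeOptions` and, like `labels` and `driver_config`, is rejected for bind mounts. A hedged sketch (the volume name and paths are invented):

```python
from docker.types import Mount

mount = Mount(
    target='/etc/app',
    source='appdata',    # a named volume
    type='volume',
    subpath='config',    # mount only this subdirectory of the volume
)
```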
@ -1,13 +1,28 @@
# flake8: noqa
from .build import create_archive, exclude_paths, mkbuildcontext, tar

from .build import create_archive, exclude_paths, match_tag, mkbuildcontext, tar
from .decorators import check_resource, minimum_version, update_headers
from .utils import (
compare_version, convert_port_bindings, convert_volume_binds,
parse_repository_tag, parse_host,
kwargs_from_env, convert_filters, datetime_to_timestamp,
create_host_config, parse_bytes, parse_env_file, version_lt,
version_gte, decode_json_header, split_command, create_ipam_config,
create_ipam_pool, parse_devices, normalize_links, convert_service_networks,
format_environment, format_extra_hosts
compare_version,
convert_filters,
convert_port_bindings,
convert_service_networks,
convert_volume_binds,
create_host_config,
create_ipam_config,
create_ipam_pool,
datetime_to_timestamp,
decode_json_header,
format_environment,
format_extra_hosts,
kwargs_from_env,
normalize_links,
parse_bytes,
parse_devices,
parse_env_file,
parse_host,
parse_repository_tag,
split_command,
version_gte,
version_lt,
)
@ -4,11 +4,19 @@ import re
import tarfile
import tempfile

from .fnmatch import fnmatch
from ..constants import IS_WINDOWS_PLATFORM

from .fnmatch import fnmatch

_SEP = re.compile('/|\\\\') if IS_WINDOWS_PLATFORM else re.compile('/')
_TAG = re.compile(
r"^[a-z0-9]+((\.|_|__|-+)[a-z0-9]+)*"
r"(?::[0-9]+)?(/[a-z0-9]+((\.|_|__|-+)[a-z0-9]+)*)*"
r"(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127})?$"
)


def match_tag(tag: str) -> bool:
return bool(_TAG.match(tag))


def tar(path, exclude=None, dockerfile=None, fileobj=None, gzip=False):

@ -42,7 +50,7 @@ def exclude_paths(root, patterns, dockerfile=None):
if dockerfile is None:
dockerfile = 'Dockerfile'

patterns.append('!' + dockerfile)
patterns.append(f"!{dockerfile}")
pm = PatternMatcher(patterns)
return set(pm.walk(root))

@ -93,10 +101,10 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
try:
with open(full_path, 'rb') as f:
t.addfile(i, f)
except OSError:
except OSError as oe:
raise OSError(
f'Can not read file in context: {full_path}'
)
) from oe
else:
# Directories, FIFOs, symlinks... don't need to be read.
t.addfile(i, None)

@ -180,7 +188,7 @@ class PatternMatcher:
fpath = os.path.join(
os.path.relpath(current_dir, root), f
)
if fpath.startswith('.' + os.path.sep):
if fpath.startswith(f".{os.path.sep}"):
fpath = fpath[2:]
match = self.matches(fpath)
if not match:
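The new `_TAG` pattern is what `match_tag()` uses to validate names before a push, including registry addresses with ports (the 7.1.0 fix). A few illustrative checks:

```python
from docker.utils.build import match_tag

assert match_tag('ubuntu:22.04')
assert match_tag('localhost:5000/myapp:1.0')  # registry port no longer rejected
assert not match_tag('MyApp:latest')          # uppercase repo component fails
```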
@ -27,9 +27,7 @@ def minimum_version(version):
def wrapper(self, *args, **kwargs):
if utils.version_lt(self._version, version):
raise errors.InvalidVersion(
'{} is not available for version < {}'.format(
f.__name__, version
)
f'{f.__name__} is not available for version < {version}',
)
return f(self, *args, **kwargs)
return wrapper
@ -79,18 +79,18 @@ def translate(pat):
i = i + 1
if i >= n:
# is "**EOF" - to align with .gitignore just accept all
res = res + '.*'
res = f"{res}.*"
else:
# is "**"
# Note that this allows for any # of /'s (even 0) because
# the .* will eat everything, even /'s
res = res + '(.*/)?'
res = f"{res}(.*/)?"
else:
# is "*" so map it to anything but "/"
res = res + '[^/]*'
res = f"{res}[^/]*"
elif c == '?':
# "?" is any char except "/"
res = res + '[^/]'
res = f"{res}[^/]"
elif c == '[':
j = i
if j < n and pat[j] == '!':

@ -100,16 +100,16 @@ def translate(pat):
while j < n and pat[j] != ']':
j = j + 1
if j >= n:
res = res + '\\['
res = f"{res}\\["
else:
stuff = pat[i:j].replace('\\', '\\\\')
i = j + 1
if stuff[0] == '!':
stuff = '^' + stuff[1:]
stuff = f"^{stuff[1:]}"
elif stuff[0] == '^':
stuff = '\\' + stuff
stuff = f"\\{stuff}"
res = f'{res}[{stuff}]'
else:
res = res + re.escape(c)

return res + '$'
return f"{res}$"
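`translate()` builds the same regexes as before, now via f-strings; `**` still expands to `(.*/)?`, so it can match zero or more path components while `*` never crosses `/`. A sketch:

```python
from docker.utils.fnmatch import fnmatch

assert fnmatch('docs/build/html/index.html', 'docs/**/*.html')
assert fnmatch('main.pyc', '*.pyc')
assert not fnmatch('src/main.pyc', '*.pyc')  # '*' does not cross '/'
```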
@ -3,7 +3,6 @@ import json.decoder

from ..errors import StreamParseError


json_decoder = json.JSONDecoder()


@ -72,4 +71,4 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
try:
yield decoder(buffered)
except Exception as e:
raise StreamParseError(e)
raise StreamParseError(e) from e
@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False):
if not end:
return [start + proto]
if randomly_available_port:
return [f'{start}-{end}' + proto]
return [f"{start}-{end}{proto}"]
return [str(port) + proto for port in range(int(start), int(end) + 1)]
@ -69,5 +69,9 @@ class ProxyConfig(dict):
return proxy_env + environment

def __str__(self):
return 'ProxyConfig(http={}, https={}, ftp={}, no_proxy={})'.format(
self.http, self.https, self.ftp, self.no_proxy)
return (
'ProxyConfig('
f'http={self.http}, https={self.https}, '
f'ftp={self.ftp}, no_proxy={self.no_proxy}'
')'
)
@ -31,12 +31,18 @@ def read(socket, n=4096):
recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK)

if not isinstance(socket, NpipeSocket):
if not hasattr(select, "poll"):
# Limited to 1024
select.select([socket], [], [])
else:
poll = select.poll()
poll.register(socket, select.POLLIN | select.POLLPRI)
poll.poll()

try:
if hasattr(socket, 'recv'):
return socket.recv(n)
if isinstance(socket, getattr(pysocket, 'SocketIO')):
if isinstance(socket, pysocket.SocketIO):
return socket.read(n)
return os.read(socket.fileno(), n)
except OSError as e:

@ -49,7 +55,7 @@ def read(socket, n=4096):
if is_pipe_ended:
# npipes don't support duplex sockets, so we interpret
# a PIPE_ENDED error as a close operation (0-length read).
return 0
return ''
raise


@ -58,7 +64,7 @@ def read_exactly(socket, n):
Reads exactly n bytes from socket
Raises SocketError if there isn't enough data
"""
data = bytes()
data = b""
while len(data) < n:
next_data = read(socket, n - len(data))
if not next_data:

@ -146,7 +152,7 @@ def consume_socket_output(frames, demux=False):
if demux is False:
# If the streams are multiplexed, the generator returns strings, that
# we just need to concatenate.
return bytes().join(frames)
return b"".join(frames)

# If the streams are demultiplexed, the generator yields tuples
# (stdout, stderr)
@ -5,18 +5,19 @@ import os
import os.path
import shlex
import string
from datetime import datetime
from packaging.version import Version

from .. import errors
from ..constants import DEFAULT_HTTP_HOST
from ..constants import DEFAULT_UNIX_SOCKET
from ..constants import DEFAULT_NPIPE
from ..constants import BYTE_UNITS
from ..tls import TLSConfig

from datetime import datetime, timezone
from functools import lru_cache
from itertools import zip_longest
from urllib.parse import urlparse, urlunparse

from .. import errors
from ..constants import (
BYTE_UNITS,
DEFAULT_HTTP_HOST,
DEFAULT_NPIPE,
DEFAULT_UNIX_SOCKET,
)
from ..tls import TLSConfig

URLComponents = collections.namedtuple(
'URLComponents',

@ -44,6 +45,7 @@ def decode_json_header(header):
return json.loads(data)


@lru_cache(maxsize=None)
def compare_version(v1, v2):
"""Compare docker versions


@ -56,14 +58,20 @@ def compare_version(v1, v2):
>>> compare_version(v2, v2)
0
"""
s1 = Version(v1)
s2 = Version(v2)
if s1 == s2:
if v1 == v2:
return 0
elif s1 > s2:
# Split into `sys.version_info` like tuples.
s1 = tuple(int(p) for p in v1.split('.'))
s2 = tuple(int(p) for p in v2.split('.'))
# Compare each component, padding with 0 if necessary.
for c1, c2 in zip_longest(s1, s2, fillvalue=0):
if c1 == c2:
continue
elif c1 > c2:
return -1
else:
return 1
return 0


def version_lt(v1, v2):

@ -127,8 +135,7 @@ def convert_volume_binds(binds):
if isinstance(v, dict):
if 'ro' in v and 'mode' in v:
raise ValueError(
'Binding cannot contain both "ro" and "mode": {}'
.format(repr(v))
f'Binding cannot contain both "ro" and "mode": {v!r}'
)

bind = v['bind']

@ -142,6 +149,22 @@ def convert_volume_binds(binds):
else:
mode = 'rw'

# NOTE: this is only relevant for Linux hosts
# (doesn't apply in Docker Desktop)
propagation_modes = [
'rshared',
'shared',
'rslave',
'slave',
'rprivate',
'private',
]
if 'propagation' in v and v['propagation'] in propagation_modes:
if mode:
mode = f"{mode},{v['propagation']}"
else:
mode = v['propagation']

result.append(
f'{k}:{bind}:{mode}'
)

@ -160,8 +183,8 @@ def convert_tmpfs_mounts(tmpfs):

if not isinstance(tmpfs, list):
raise ValueError(
'Expected tmpfs value to be either a list or a dict, found: {}'
.format(type(tmpfs).__name__)
'Expected tmpfs value to be either a list or a dict, '
f'found: {type(tmpfs).__name__}'
)

result = {}

@ -175,8 +198,8 @@ def convert_tmpfs_mounts(tmpfs):

else:
raise ValueError(
"Expected item in tmpfs list to be a string, found: {}"
.format(type(mount).__name__)
"Expected item in tmpfs list to be a string, "
f"found: {type(mount).__name__}"
)

result[name] = options

@ -218,9 +241,9 @@ def parse_host(addr, is_win32=False, tls=False):

parsed_url = urlparse(addr)
proto = parsed_url.scheme
if not proto or any([x not in string.ascii_letters + '+' for x in proto]):
if not proto or any(x not in f"{string.ascii_letters}+" for x in proto):
# https://bugs.python.org/issue754016
parsed_url = urlparse('//' + addr, 'tcp')
parsed_url = urlparse(f"//{addr}", 'tcp')
proto = 'tcp'

if proto == 'fd':

@ -256,15 +279,14 @@ def parse_host(addr, is_win32=False, tls=False):

if parsed_url.path and proto == 'ssh':
raise errors.DockerException(
'Invalid bind address format: no path allowed for this protocol:'
' {}'.format(addr)
f'Invalid bind address format: no path allowed for this protocol: {addr}'
)
else:
path = parsed_url.path
if proto == 'unix' and parsed_url.hostname is not None:
# For legacy reasons, we consider unix://path
# to be valid and equivalent to unix:///path
path = '/'.join((parsed_url.hostname, path))
path = f"{parsed_url.hostname}/{path}"

netloc = parsed_url.netloc
if proto in ('tcp', 'ssh'):

@ -272,8 +294,7 @@ def parse_host(addr, is_win32=False, tls=False):
if port <= 0:
if proto != 'ssh':
raise errors.DockerException(
'Invalid bind address format: port is required:'
' {}'.format(addr)
f'Invalid bind address format: port is required: {addr}'
)
port = 22
netloc = f'{parsed_url.netloc}:{port}'

@ -283,7 +304,7 @@ def parse_host(addr, is_win32=False, tls=False):

# Rewrite schemes to fit library internals (requests adapters)
if proto == 'tcp':
proto = 'http{}'.format('s' if tls else '')
proto = f"http{'s' if tls else ''}"
elif proto == 'unix':
proto = 'http+unix'


@ -329,7 +350,7 @@ def parse_devices(devices):
return device_list


def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None):
def kwargs_from_env(environment=None):
if not environment:
environment = os.environ
host = environment.get('DOCKER_HOST')

@ -357,18 +378,11 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None):
if not cert_path:
cert_path = os.path.join(os.path.expanduser('~'), '.docker')

if not tls_verify and assert_hostname is None:
# assert_hostname is a subset of TLS verification,
# so if it's not set already then set it to false.
assert_hostname = False

params['tls'] = TLSConfig(
client_cert=(os.path.join(cert_path, 'cert.pem'),
os.path.join(cert_path, 'key.pem')),
ca_cert=os.path.join(cert_path, 'ca.pem'),
verify=tls_verify,
ssl_version=ssl_version,
assert_hostname=assert_hostname,
)

return params

@ -389,8 +403,8 @@ def convert_filters(filters):


def datetime_to_timestamp(dt):
"""Convert a UTC datetime to a Unix timestamp"""
delta = dt - datetime.utcfromtimestamp(0)
"""Convert a datetime to a Unix timestamp"""
delta = dt.astimezone(timezone.utc) - datetime(1970, 1, 1, tzinfo=timezone.utc)
return delta.seconds + delta.days * 24 * 3600


@ -417,19 +431,18 @@ def parse_bytes(s):
if suffix in units.keys() or suffix.isdigit():
try:
digits = float(digits_part)
except ValueError:
except ValueError as ve:
raise errors.DockerException(
'Failed converting the string value for memory ({}) to'
' an integer.'.format(digits_part)
)
'Failed converting the string value for memory '
f'({digits_part}) to an integer.'
) from ve

# Reconvert to long for the final result
s = int(digits * units[suffix])
else:
raise errors.DockerException(
'The specified value for memory ({}) should specify the'
' units. The postfix should be one of the `b` `k` `m` `g`'
' characters'.format(s)
f'The specified value for memory ({s}) should specify the units. '
'The postfix should be one of the `b` `k` `m` `g` characters'
)

return s

@ -465,8 +478,7 @@ def parse_env_file(env_file):
environment[k] = v
else:
raise errors.DockerException(
'Invalid line in environment file {}:\n{}'.format(
env_file, line))
f'Invalid line in environment file {env_file}:\n{line}')

return environment
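The rewritten `compare_version` drops `packaging.Version` for a simple padded tuple comparison (hence the `lru_cache`). Its sign convention is inverted relative to a typical comparator, which `version_lt`/`version_gte` account for; a sketch:

```python
from docker.utils.utils import compare_version, version_gte, version_lt

assert compare_version('1.24', '1.44') == 1    # v2 is newer -> 1
assert compare_version('1.44', '1.24') == -1   # v1 is newer -> -1
assert compare_version('1.44', '1.44') == 0
assert version_lt('1.24', '1.44')
assert version_gte('1.44', '1.24')
```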
@ -1,14 +1,8 @@
try:
from ._version import __version__
except ImportError:
try:
# importlib.metadata available in Python 3.8+, the fallback (0.0.0)
# is fine because release builds use _version (above) rather than
# this code path, so it only impacts developing w/ 3.7
from importlib.metadata import version, PackageNotFoundError
from importlib.metadata import PackageNotFoundError, version
try:
__version__ = version('docker')
except PackageNotFoundError:
__version__ = '0.0.0'
except ImportError:
__version__ = '0.0.0'
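With the fallback simplified, the resolution order is `docker/_version.py` (written by hatch-vcs at build time), then `importlib.metadata`, then `'0.0.0'`. At runtime:

```python
import docker

print(docker.__version__)  # e.g. '7.1.0' for a release build
```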
@ -1,2 +0,0 @@
myst-parser==0.18.0
Sphinx==5.1.1
@ -1,6 +1,104 @@
Changelog
=========

7.1.0
-----
### Upgrade Notes
- Bumped minimum engine API version to 1.24
- Bumped default engine API version to 1.44 (Moby 25.0)

### Bugfixes
- Fixed issue with tag parsing when the registry address includes ports that resulted in `invalid tag format` errors
- Fixed issue preventing creating new configs (`ConfigCollection`), which failed with a `KeyError` due to the `name` field
- Fixed an issue due to an update in the [requests](https://github.com/psf/requests) package breaking `docker-py` by applying the [suggested fix](https://github.com/psf/requests/pull/6710)

### Miscellaneous
- Documentation improvements
- Updated Ruff (linter) and fixed minor linting issues
- Packaging/CI updates
  - Started using hatch for packaging (https://github.com/pypa/hatch)
  - Updated `setup-python` github action
- Updated tests
  - Stopped checking for deprecated container and image related fields (`Container` and `ContainerConfig`)
  - Updated tests that check `NetworkSettings.Networks.<network>.Aliases` due to engine changes

7.0.0
-----
### Upgrade Notes
- Removed SSL version (`ssl_version`) and explicit hostname check (`assert_hostname`) options
  - `assert_hostname` has not been used since Python 3.6 and was removed in 3.12
  - Python 3.7+ supports TLSv1.3 by default
- Websocket support is no longer included by default
  - Use `pip install docker[websockets]` to include `websocket-client` dependency
  - By default, `docker-py` hijacks the TCP connection and does not use Websockets
  - Websocket client is only required to use `attach_socket(container, ws=True)`
- Python 3.7 no longer officially supported (reached end-of-life June 2023)

### Features
- Python 3.12 support
- Full `networking_config` support for `containers.create()`
  - Replaces `network_driver_opt` (added in 6.1.0)
- Add `health()` property to container that returns status (e.g. `unhealthy`); see the sketch after this changelog
- Add `pause` option to `container.commit()`
- Add support for bind mount propagation (e.g. `rshared`, `private`)
- Add `filters`, `keep_storage`, and `all` parameters to `prune_builds()` (requires API v1.39+)

### Bugfixes
- Consistently return `docker.errors.NotFound` on 404 responses
- Validate tag format before image push

### Miscellaneous
- Upgraded urllib3 version in `requirements.txt` (used for development/tests)
- Documentation typo fixes & formatting improvements
- Fixed integration test compatibility for newer Moby engine versions
- Switch to [ruff](https://github.com/astral-sh/ruff) for linting

6.1.3
-----
#### Bugfixes
- Fix compatibility with [`eventlet/eventlet`](https://github.com/eventlet/eventlet)

6.1.2
-----

#### Bugfixes
- Fix for socket timeouts on long `docker exec` calls

6.1.1
-----

#### Bugfixes
- Fix `containers.stats()` hanging with `stream=True`
- Correct return type in docs for `containers.diff()` method


6.1.0
-----

### Upgrade Notes
- Errors are no longer returned during client initialization if the credential helper cannot be found. A warning will be emitted instead, and an error is returned if the credential helper is used.

### Features
- Python 3.11 support
- Use `poll()` instead of `select()` on non-Windows platforms
- New API fields
  - `network_driver_opt` on container run / create
  - `one-shot` on container stats
  - `status` on services list

### Bugfixes
- Support for requests 2.29.0+ and urllib3 2.x
- Do not strip characters from volume names
- Fix connection leak on container.exec_* operations
- Fix errors closing named pipes on Windows

6.0.1
-----

### Bugfixes
- Fix for `The pipe has been ended errors` on Windows
- Support floats for container log filtering by timestamp (`since` / `until`)

6.0.0
-----
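A short sketch tying together two of the 7.0.0 feature bullets above, the `health` property and the `pause` commit option (image and repository names are invented):

```python
import docker

client = docker.from_env()
container = client.containers.run('nginx:alpine', detach=True)

# 'healthy' / 'unhealthy' / 'starting', or 'unknown' when the image
# defines no healthcheck
print(container.health)

# pause the container while the snapshot is taken
container.commit(repository='example/nginx', tag='snapshot', pause=True)
```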
@ -18,6 +18,8 @@
import datetime
import os
import sys
from importlib.metadata import version

sys.path.insert(0, os.path.abspath('..'))


@ -56,7 +58,7 @@ master_doc = 'index'
# General information about the project.
project = 'Docker SDK for Python'
year = datetime.datetime.now().year
copyright = '%d Docker Inc' % year
copyright = f'{year} Docker Inc'
author = 'Docker Inc'

# The version info for the project you're documenting, acts as replacement for

@ -64,7 +66,6 @@ author = 'Docker Inc'
# built documents.
#
# see https://github.com/pypa/setuptools_scm#usage-from-sphinx
from importlib.metadata import version
release = version('docker')
# for example take major/minor
version = '.'.join(release.split('.')[:2])
@ -16,10 +16,13 @@ Prepare the command we are going to use. It prints "hello stdout"
in `stdout`, followed by "hello stderr" in `stderr`:

>>> cmd = '/bin/sh -c "echo hello stdout ; echo hello stderr >&2"'

We'll run this command with all four combinations of ``stream``
and ``demux``.

With ``stream=False`` and ``demux=False``, the output is a string
that contains both the `stdout` and the `stderr` output:

>>> res = container.exec_run(cmd, stream=False, demux=False)
>>> res.output
b'hello stderr\nhello stdout\n'

@ -52,15 +55,8 @@ Traceback (most recent call last):
File "<stdin>", line 1, in <module>
StopIteration

Finally, with ``stream=False`` and ``demux=True``, the whole output
is returned, but the streams are still separated:
Finally, with ``stream=False`` and ``demux=True``, the output is a tuple ``(stdout, stderr)``:

>>> res = container.exec_run(cmd, stream=True, demux=True)
>>> next(res.output)
(b'hello stdout\n', None)
>>> next(res.output)
(None, b'hello stderr\n')
>>> next(res.output)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
StopIteration
>>> res = container.exec_run(cmd, stream=False, demux=True)
>>> res.output
(b'hello stdout\n', b'hello stderr\n')
pyproject.toml
@ -1,5 +1,102 @@
[build-system]
requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"]
requires = ["hatchling", "hatch-vcs"]
build-backend = "hatchling.build"

[tool.setuptools_scm]
write_to = 'docker/_version.py'
[project]
name = "docker"
dynamic = ["version"]
description = "A Python library for the Docker Engine API."
readme = "README.md"
license = "Apache-2.0"
requires-python = ">=3.8"
maintainers = [
    { name = "Docker Inc.", email = "no-reply@docker.com" },
]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Environment :: Other Environment",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development",
    "Topic :: Utilities",
]

dependencies = [
    "requests >= 2.26.0",
    "urllib3 >= 1.26.0",
    "pywin32>=304; sys_platform == \"win32\"",
]

[project.optional-dependencies]
# ssh feature allows DOCKER_HOST=ssh://... style connections
ssh = [
    "paramiko>=2.4.3",
]
# tls is always supported, the feature is a no-op for backwards compatibility
tls = []
# websockets can be used as an alternate container attach mechanism but
# by default docker-py hijacks the TCP connection and does not use Websockets
# unless attach_socket(container, ws=True) is called
websockets = [
    "websocket-client >= 1.3.0",
]
# docs are dependencies required to build the ReadTheDocs site
# this is only needed for CI / working on the docs!
docs = [
    "myst-parser==0.18.0",
    "Sphinx==5.1.1",
]
# dev are dependencies required to test & lint this project
# this is only needed if you are making code changes to docker-py!
dev = [
    "coverage==7.2.7",
    "pytest==7.4.2",
    "pytest-cov==4.1.0",
    "pytest-timeout==2.1.0",
    "ruff==0.1.8",
]

[project.urls]
Changelog = "https://docker-py.readthedocs.io/en/stable/change-log.html"
Documentation = "https://docker-py.readthedocs.io"
Homepage = "https://github.com/docker/docker-py"
Source = "https://github.com/docker/docker-py"
Tracker = "https://github.com/docker/docker-py/issues"

[tool.hatch.version]
source = "vcs"

[tool.hatch.build.hooks.vcs]
version-file = "docker/_version.py"

[tool.hatch.build.targets.sdist]
include = [
    "/docker",
]

[tool.ruff]
target-version = "py38"
extend-select = [
    "B",
    "C",
    "F",
    "I",
    "UP",
    "W",
]
ignore = [
    "UP012",  # unnecessary `UTF-8` argument (we want to be explicit)
    "C901",  # too complex (there's a whole bunch of these)
]

[tool.ruff.per-file-ignores]
"**/__init__.py" = ["F401"]
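The optional-dependency comments above translate directly into install-time choices. A hedged sketch of what the ssh extra enables; the host name and user are placeholders, and it assumes `pip install docker[ssh]` plus working SSH key auth to the remote host:

import docker

# Connect over SSH instead of a local socket (requires the 'ssh' extra,
# which pulls in paramiko); 'user@remote-host' is a placeholder.
client = docker.DockerClient(base_url='ssh://user@remote-host')
print(client.version()['Version'])
client.close()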
requirements.txt
@ -1,6 +0,0 @@
packaging==21.3
paramiko==2.11.0
pywin32==304; sys_platform == 'win32'
requests==2.28.1
urllib3==1.26.11
websocket-client==1.3.3
setup.cfg
@ -1,3 +0,0 @@
[metadata]
description_file = README.rst
license = Apache License 2.0
setup.py
@ -1,80 +0,0 @@
#!/usr/bin/env python

import codecs
import os

from setuptools import find_packages
from setuptools import setup

ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)

requirements = [
    'packaging >= 14.0',
    'requests >= 2.26.0',
    'urllib3 >= 1.26.0',
    'websocket-client >= 0.32.0',
]

extras_require = {
    # win32 APIs if on Windows (required for npipe support)
    ':sys_platform == "win32"': 'pywin32>=304',

    # This is now a no-op, as similarly the requests[security] extra is
    # a no-op as of requests 2.26.0, this is always available/by default now
    # see https://github.com/psf/requests/pull/5867
    'tls': [],

    # Only required when connecting using the ssh:// protocol
    'ssh': ['paramiko>=2.4.3'],
}

with open('./test-requirements.txt') as test_reqs_txt:
    test_requirements = [line for line in test_reqs_txt]


long_description = ''
with codecs.open('./README.md', encoding='utf-8') as readme_md:
    long_description = readme_md.read()

setup(
    name="docker",
    use_scm_version={
        'write_to': 'docker/_version.py'
    },
    description="A Python library for the Docker Engine API.",
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/docker/docker-py',
    project_urls={
        'Documentation': 'https://docker-py.readthedocs.io',
        'Changelog': 'https://docker-py.readthedocs.io/en/stable/change-log.html',  # noqa: E501
        'Source': 'https://github.com/docker/docker-py',
        'Tracker': 'https://github.com/docker/docker-py/issues',
    },
    packages=find_packages(exclude=["tests.*", "tests"]),
    setup_requires=['setuptools_scm'],
    install_requires=requirements,
    tests_require=test_requirements,
    extras_require=extras_require,
    python_requires='>=3.7',
    zip_safe=False,
    test_suite='tests',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Other Environment',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Topic :: Software Development',
        'Topic :: Utilities',
        'License :: OSI Approved :: Apache Software License',
    ],
    maintainer='Ulysses Souza',
    maintainer_email='ulysses.souza@docker.com',
)
test-requirements.txt
@ -1,6 +0,0 @@
setuptools==63.2.0
coverage==6.4.2
flake8==4.0.1
pytest==7.1.2
pytest-cov==3.0.0
pytest-timeout==2.1.0
@ -1,13 +1,8 @@
# syntax=docker/dockerfile:1

ARG PYTHON_VERSION=3.10
ARG PYTHON_VERSION=3.12
FROM python:${PYTHON_VERSION}

ARG APT_MIRROR
RUN sed -ri "s/(httpredir|deb).debian.org/${APT_MIRROR:-deb.debian.org}/g" /etc/apt/sources.list \
    && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list

RUN apt-get update && apt-get -y install --no-install-recommends \
    gnupg2 \
    pass

@ -31,16 +26,10 @@ RUN curl -sSL -o /opt/docker-credential-pass.tar.gz \
    chmod +x /usr/local/bin/docker-credential-pass

WORKDIR /src
COPY . .

COPY requirements.txt /src/requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install -r requirements.txt

COPY test-requirements.txt /src/test-requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install -r test-requirements.txt

COPY . /src
ARG SETUPTOOLS_SCM_PRETEND_VERSION=99.0.0+docker
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install -e .
ARG VERSION=0.0.0.dev0
RUN --mount=type=cache,target=/cache/pip \
    PIP_CACHE_DIR=/cache/pip \
    SETUPTOOLS_SCM_PRETEND_VERSION=${VERSION} \
    pip install .[dev,ssh,websockets]

@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1

ARG PYTHON_VERSION=3.10
ARG PYTHON_VERSION=3.12

FROM python:${PYTHON_VERSION}
RUN mkdir /tmp/certs

@ -1,7 +1,7 @@
# syntax=docker/dockerfile:1

ARG API_VERSION=1.41
ARG ENGINE_VERSION=20.10
ARG API_VERSION=1.45
ARG ENGINE_VERSION=26.1

FROM docker:${ENGINE_VERSION}-dind

@ -8,10 +8,11 @@ import tarfile
import tempfile
import time

import docker
import paramiko
import pytest

import docker


def make_tree(dirs, files):
    base = tempfile.mkdtemp()

@ -46,6 +47,19 @@ def untar_file(tardata, filename):
    return result


def skip_if_desktop():
    def fn(f):
        @functools.wraps(f)
        def wrapped(self, *args, **kwargs):
            info = self.client.info()
            if info['Name'] == 'docker-desktop':
                pytest.skip('Test does not support Docker Desktop')
            return f(self, *args, **kwargs)

        return wrapped

    return fn

def requires_api_version(version):
    test_version = os.environ.get(
        'DOCKER_TEST_API_VERSION', docker.constants.DEFAULT_DOCKER_API_VERSION

@ -80,7 +94,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40):
    start_time = time.time()
    while not condition():
        if time.time() - start_time > timeout:
            raise AssertionError("Timeout: %s" % condition)
            raise AssertionError(f"Timeout: {condition}")
        time.sleep(delay)

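skip_if_desktop is a decorator factory: calling it returns a decorator that checks the daemon's reported name at test time and skips on Docker Desktop. A hedged usage sketch; the test class and method names are illustrative and assume the test-suite context above:

# Illustrative only: how a test method would opt out on Docker Desktop,
# mirroring the decorator defined above (assumes BaseAPIIntegrationTest).
class SomeIntegrationTest(BaseAPIIntegrationTest):
    @skip_if_desktop()
    def test_needs_real_linux_daemon(self):
        info = self.client.info()
        assert info['Name'] != 'docker-desktop'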
@ -3,13 +3,13 @@ import os
import shutil
import tempfile

import pytest

from docker import errors
from docker.utils.proxy import ProxyConfig

import pytest

from .base import BaseAPIIntegrationTest, TEST_IMG
from ..helpers import random_name, requires_api_version, requires_experimental
from .base import TEST_IMG, BaseAPIIntegrationTest


class BuildTest(BaseAPIIntegrationTest):

@ -132,7 +132,7 @@ class BuildTest(BaseAPIIntegrationTest):
    path=base_dir,
    tag=tag,
)
for chunk in stream:
for _chunk in stream:
    pass

c = self.client.create_container(tag, ['find', '/test', '-type', 'f'])

@ -142,7 +142,7 @@ class BuildTest(BaseAPIIntegrationTest):

logs = logs.decode('utf-8')

assert sorted(list(filter(None, logs.split('\n')))) == sorted([
assert sorted(filter(None, logs.split('\n'))) == sorted([
    '/test/#file.txt',
    '/test/ignored/subdir/excepted-with-spaces',
    '/test/ignored/subdir/excepted-file',

@ -160,7 +160,7 @@ class BuildTest(BaseAPIIntegrationTest):
    fileobj=script, tag='buildargs', buildargs={'test': 'OK'}
)
self.tmp_imgs.append('buildargs')
for chunk in stream:
for _chunk in stream:
    pass

info = self.client.inspect_image('buildargs')

@ -180,7 +180,7 @@ class BuildTest(BaseAPIIntegrationTest):
    fileobj=script, tag=tag, shmsize=shmsize
)
self.tmp_imgs.append(tag)
for chunk in stream:
for _chunk in stream:
    pass

# There is currently no way to get the shmsize

@ -198,7 +198,7 @@ class BuildTest(BaseAPIIntegrationTest):
    isolation='default'
)

for chunk in stream:
for _chunk in stream:
    pass

@requires_api_version('1.23')

@ -213,7 +213,7 @@ class BuildTest(BaseAPIIntegrationTest):
    fileobj=script, tag='labels', labels=labels
)
self.tmp_imgs.append('labels')
for chunk in stream:
for _chunk in stream:
    pass

info = self.client.inspect_image('labels')

@ -230,7 +230,7 @@ class BuildTest(BaseAPIIntegrationTest):

stream = self.client.build(fileobj=script, tag='build1')
self.tmp_imgs.append('build1')
for chunk in stream:
for _chunk in stream:
    pass

stream = self.client.build(

@ -271,11 +271,11 @@ class BuildTest(BaseAPIIntegrationTest):
    fileobj=script, target='first', tag='build1'
)
self.tmp_imgs.append('build1')
for chunk in stream:
for _chunk in stream:
    pass

info = self.client.inspect_image('build1')
assert not info['Config']['OnBuild']
assert 'OnBuild' not in info['Config'] or not info['Config']['OnBuild']

@requires_api_version('1.25')
def test_build_with_network_mode(self):

@ -300,7 +300,7 @@ class BuildTest(BaseAPIIntegrationTest):
)

self.tmp_imgs.append('dockerpytest_customnetbuild')
for chunk in stream:
for _chunk in stream:
    pass

assert self.client.inspect_image('dockerpytest_customnetbuild')

@ -312,7 +312,7 @@ class BuildTest(BaseAPIIntegrationTest):
)

self.tmp_imgs.append('dockerpytest_nonebuild')
logs = [chunk for chunk in stream]
logs = list(stream)
assert 'errorDetail' in logs[-1]
assert logs[-1]['errorDetail']['code'] == 1

@ -365,7 +365,7 @@ class BuildTest(BaseAPIIntegrationTest):
    fileobj=script, tag=tag, squash=squash
)
self.tmp_imgs.append(tag)
for chunk in stream:
for _chunk in stream:
    pass

return self.client.inspect_image(tag)

@ -389,10 +389,8 @@ class BuildTest(BaseAPIIntegrationTest):
lines = []
for chunk in stream:
    lines.append(chunk.get('stream'))
expected = '{0}{2}\n{1}'.format(
    control_chars[0], control_chars[1], snippet
)
assert any([line == expected for line in lines])
expected = f'{control_chars[0]}{snippet}\n{control_chars[1]}'
assert any(line == expected for line in lines)

def test_build_gzip_encoding(self):
    base_dir = tempfile.mkdtemp()


@ -47,7 +47,7 @@ class ConnectionTimeoutTest(unittest.TestCase):
# This call isn't supposed to complete, and it should fail fast.
try:
    res = self.client.inspect_container('id')
except:  # noqa: E722
except Exception:
    pass
end = time.time()
assert res is None

@ -72,6 +72,4 @@ class UnixconnTest(unittest.TestCase):
client.close()
del client

assert len(w) == 0, "No warnings produced: {}".format(
    w[0].message
)
assert len(w) == 0, f"No warnings produced: {w[0].message}"


@ -1,6 +1,7 @@
import docker
import pytest

import docker

from ..helpers import force_leave_swarm, requires_api_version
from .base import BaseAPIIntegrationTest


@ -9,15 +9,17 @@ import pytest
import requests

import docker
from .. import helpers
from ..helpers import assert_cat_socket_detached_with_keys
from ..helpers import ctrl_with
from ..helpers import requires_api_version
from .base import BaseAPIIntegrationTest
from .base import TEST_IMG
from docker.constants import IS_WINDOWS_PLATFORM
from docker.utils.socket import next_frame_header
from docker.utils.socket import read_exactly
from docker.utils.socket import next_frame_header, read_exactly

from .. import helpers
from ..helpers import (
    assert_cat_socket_detached_with_keys,
    ctrl_with,
    requires_api_version,
    skip_if_desktop,
)
from .base import TEST_IMG, BaseAPIIntegrationTest


class ListContainersTest(BaseAPIIntegrationTest):

@ -122,8 +124,8 @@ class CreateContainerTest(BaseAPIIntegrationTest):
self.client.wait(id)
with pytest.raises(docker.errors.APIError) as exc:
    self.client.remove_container(id)
err = exc.value.explanation
assert 'You cannot remove ' in err
err = exc.value.explanation.lower()
assert 'stop the container before' in err
self.client.remove_container(id, force=True)

def test_create_container_with_volumes_from(self):

@ -542,6 +544,27 @@ class VolumeBindTest(BaseAPIIntegrationTest):
inspect_data = self.client.inspect_container(container)
self.check_container_data(inspect_data, False)

@skip_if_desktop()
def test_create_with_binds_rw_rshared(self):
    container = self.run_with_volume_propagation(
        False,
        'rshared',
        TEST_IMG,
        ['touch', os.path.join(self.mount_dest, self.filename)],
    )
    inspect_data = self.client.inspect_container(container)
    self.check_container_data(inspect_data, True, 'rshared')
    container = self.run_with_volume_propagation(
        True,
        'rshared',
        TEST_IMG,
        ['ls', self.mount_dest],
    )
    logs = self.client.logs(container).decode('utf-8')
    assert self.filename in logs
    inspect_data = self.client.inspect_container(container)
    self.check_container_data(inspect_data, False, 'rshared')

@requires_api_version('1.30')
def test_create_with_mounts(self):
    mount = docker.types.Mount(

@ -597,7 +620,57 @@ class VolumeBindTest(BaseAPIIntegrationTest):
assert mount['Source'] == mount_data['Name']
assert mount_data['RW'] is True

def check_container_data(self, inspect_data, rw):
@requires_api_version('1.45')
def test_create_with_subpath_volume_mount(self):
    source_volume = helpers.random_name()
    self.client.create_volume(name=source_volume)

    setup_container = None
    test_container = None

    # Create a file structure in the volume to test with
    setup_container = self.client.create_container(
        TEST_IMG,
        [
            "sh",
            "-c",
            'mkdir -p /vol/subdir && echo "test content" > /vol/subdir/testfile.txt',
        ],
        host_config=self.client.create_host_config(
            binds=[f"{source_volume}:/vol"]
        ),
    )
    self.client.start(setup_container)
    self.client.wait(setup_container)

    # Now test with subpath
    mount = docker.types.Mount(
        type="volume",
        source=source_volume,
        target=self.mount_dest,
        read_only=True,
        subpath="subdir",
    )

    host_config = self.client.create_host_config(mounts=[mount])
    test_container = self.client.create_container(
        TEST_IMG,
        ["cat", os.path.join(self.mount_dest, "testfile.txt")],
        host_config=host_config,
    )

    self.client.start(test_container)
    self.client.wait(test_container)  # Wait for container to finish
    output = self.client.logs(test_container).decode("utf-8").strip()

    # If the subpath feature is working, we should be able to see the content
    # of the file in the subdir
    assert output == "test content"

def check_container_data(self, inspect_data, rw, propagation='rprivate'):
    assert 'Mounts' in inspect_data
    filtered = list(filter(
        lambda x: x['Destination'] == self.mount_dest,

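Outside the test harness, the same subpath mount can be expressed with the high-level API. A hedged sketch, assuming a daemon with API version 1.45+ (per the decorator above) and a volume that already contains a 'subdir' directory; the volume and image names are placeholders:

import docker
from docker.types import Mount

# Mount only the 'subdir' subtree of the volume at /data, read-only.
client = docker.from_env()
mount = Mount(target='/data', source='my-volume', type='volume',
              read_only=True, subpath='subdir')
output = client.containers.run('alpine', 'ls /data',
                               mounts=[mount], remove=True)
print(output)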
@ -607,6 +680,7 @@ class VolumeBindTest(BaseAPIIntegrationTest):
mount_data = filtered[0]
assert mount_data['Source'] == self.mount_origin
assert mount_data['RW'] == rw
assert mount_data['Propagation'] == propagation

def run_with_volume(self, ro, *args, **kwargs):
    return self.run_container(

@ -624,6 +698,23 @@ class VolumeBindTest(BaseAPIIntegrationTest):
        **kwargs
    )

def run_with_volume_propagation(self, ro, propagation, *args, **kwargs):
    return self.run_container(
        *args,
        volumes={self.mount_dest: {}},
        host_config=self.client.create_host_config(
            binds={
                self.mount_origin: {
                    'bind': self.mount_dest,
                    'ro': ro,
                    'propagation': propagation
                },
            },
            network_mode='none'
        ),
        **kwargs
    )


class ArchiveTest(BaseAPIIntegrationTest):
    def test_get_file_archive_from_container(self):

@ -666,9 +757,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
test_file.seek(0)
ctnr = self.client.create_container(
    TEST_IMG,
    'cat {}'.format(
        os.path.join('/vol1/', os.path.basename(test_file.name))
    ),
    f"cat {os.path.join('/vol1/', os.path.basename(test_file.name))}",
    volumes=['/vol1']
)
self.tmp_containers.append(ctnr)

@ -826,7 +915,7 @@ class LogsTest(BaseAPIIntegrationTest):
exitcode = self.client.wait(id)['StatusCode']
assert exitcode == 0
logs = self.client.logs(id)
assert logs == (snippet + '\n').encode(encoding='ascii')
assert logs == f"{snippet}\n".encode(encoding='ascii')

def test_logs_tail_option(self):
    snippet = '''Line1

@ -857,7 +946,7 @@ Line2'''
exitcode = self.client.wait(id)['StatusCode']
assert exitcode == 0

assert logs == (snippet + '\n').encode(encoding='ascii')
assert logs == f"{snippet}\n".encode(encoding='ascii')

@pytest.mark.timeout(5)
@pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'),

@ -878,7 +967,7 @@ Line2'''
for chunk in generator:
    logs += chunk

assert logs == (snippet + '\n').encode(encoding='ascii')
assert logs == f"{snippet}\n".encode(encoding='ascii')

def test_logs_with_dict_instead_of_id(self):
    snippet = 'Flowering Nights (Sakuya Iyazoi)'

@ -891,7 +980,7 @@ Line2'''
exitcode = self.client.wait(id)['StatusCode']
assert exitcode == 0
logs = self.client.logs(container)
assert logs == (snippet + '\n').encode(encoding='ascii')
assert logs == f"{snippet}\n".encode(encoding='ascii')

def test_logs_with_tail_0(self):
    snippet = 'Flowering Nights (Sakuya Iyazoi)'

@ -920,7 +1009,7 @@ Line2'''
logs_until_1 = self.client.logs(container, until=1)
assert logs_until_1 == b''
logs_until_now = self.client.logs(container, datetime.now())
assert logs_until_now == (snippet + '\n').encode(encoding='ascii')
assert logs_until_now == f"{snippet}\n".encode(encoding='ascii')


class DiffTest(BaseAPIIntegrationTest):

@ -1086,7 +1175,7 @@ class PortTest(BaseAPIIntegrationTest):

ip, host_port = port_binding['HostIp'], port_binding['HostPort']

port_binding = port if not protocol else port + "/" + protocol
port_binding = port if not protocol else f"{port}/{protocol}"
assert ip == port_bindings[port_binding][0]
assert host_port == port_bindings[port_binding][1]


@ -1392,7 +1481,7 @@ class GetContainerStatsTest(BaseAPIIntegrationTest):
response = self.client.stats(container, stream=0)
self.client.kill(container)

assert type(response) == dict
assert isinstance(response, dict)
for key in ['read', 'networks', 'precpu_stats', 'cpu_stats',
            'memory_stats', 'blkio_stats']:
    assert key in response

@ -1405,7 +1494,7 @@ class GetContainerStatsTest(BaseAPIIntegrationTest):
self.client.start(container)
stream = self.client.stats(container)
for chunk in stream:
    assert type(chunk) == dict
    assert isinstance(chunk, dict)
    for key in ['read', 'network', 'precpu_stats', 'cpu_stats',
                'memory_stats', 'blkio_stats']:
        assert key in chunk


@ -1,11 +1,12 @@
from ..helpers import assert_cat_socket_detached_with_keys
from ..helpers import ctrl_with
from ..helpers import requires_api_version
from .base import BaseAPIIntegrationTest
from .base import TEST_IMG
from docker.utils.proxy import ProxyConfig
from docker.utils.socket import next_frame_header
from docker.utils.socket import read_exactly
from docker.utils.socket import next_frame_header, read_exactly

from ..helpers import (
    assert_cat_socket_detached_with_keys,
    ctrl_with,
    requires_api_version,
)
from .base import TEST_IMG, BaseAPIIntegrationTest


class ExecTest(BaseAPIIntegrationTest):


@ -1,5 +1,5 @@
from .base import BaseAPIIntegrationTest, TEST_IMG
from .. import helpers
from .base import TEST_IMG, BaseAPIIntegrationTest

SECOND = 1000000000


@ -16,7 +16,7 @@ class HealthcheckTest(BaseAPIIntegrationTest):
@helpers.requires_api_version('1.24')
def test_healthcheck_shell_command(self):
    container = self.client.create_container(
        TEST_IMG, 'top', healthcheck=dict(test='echo "hello world"'))
        TEST_IMG, 'top', healthcheck={'test': 'echo "hello world"'})
    self.tmp_containers.append(container)

    res = self.client.inspect_container(container)

@ -27,12 +27,12 @@ class HealthcheckTest(BaseAPIIntegrationTest):
@helpers.requires_api_version('1.24')
def test_healthcheck_passes(self):
    container = self.client.create_container(
        TEST_IMG, 'top', healthcheck=dict(
            test="true",
            interval=1 * SECOND,
            timeout=1 * SECOND,
            retries=1,
        ))
        TEST_IMG, 'top', healthcheck={
            'test': "true",
            'interval': 1 * SECOND,
            'timeout': 1 * SECOND,
            'retries': 1,
        })
    self.tmp_containers.append(container)
    self.client.start(container)
    wait_on_health_status(self.client, container, "healthy")

@ -40,12 +40,12 @@ class HealthcheckTest(BaseAPIIntegrationTest):
@helpers.requires_api_version('1.24')
def test_healthcheck_fails(self):
    container = self.client.create_container(
        TEST_IMG, 'top', healthcheck=dict(
            test="false",
            interval=1 * SECOND,
            timeout=1 * SECOND,
            retries=1,
        ))
        TEST_IMG, 'top', healthcheck={
            'test': "false",
            'interval': 1 * SECOND,
            'timeout': 1 * SECOND,
            'retries': 1,
        })
    self.tmp_containers.append(container)
    self.client.start(container)
    wait_on_health_status(self.client, container, "unhealthy")

@ -53,14 +53,14 @@ class HealthcheckTest(BaseAPIIntegrationTest):
@helpers.requires_api_version('1.29')
def test_healthcheck_start_period(self):
    container = self.client.create_container(
        TEST_IMG, 'top', healthcheck=dict(
            test="echo 'x' >> /counter.txt && "
        TEST_IMG, 'top', healthcheck={
            'test': "echo 'x' >> /counter.txt && "
                    "test `cat /counter.txt | wc -l` -ge 3",
            interval=1 * SECOND,
            timeout=1 * SECOND,
            retries=1,
            start_period=3 * SECOND
        )
            'interval': 1 * SECOND,
            'timeout': 1 * SECOND,
            'retries': 1,
            'start_period': 3 * SECOND
        }
    )

    self.tmp_containers.append(container)
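The healthcheck dicts above use nanosecond durations (SECOND = 1000000000). The same shape works from the high-level API; a minimal sketch, assuming a local daemon, with an illustrative image and timings:

import docker

SECOND = 1_000_000_000  # healthcheck durations are expressed in nanoseconds

client = docker.from_env()
container = client.containers.run(
    'alpine', 'sleep 60', detach=True,
    healthcheck={
        'test': ['CMD-SHELL', 'true'],
        'interval': 1 * SECOND,
        'timeout': 1 * SECOND,
        'retries': 1,
    },
)
# 'Health' appears in State once a healthcheck is configured
state = client.api.inspect_container(container.id)['State']
print(state['Health']['Status'])
container.remove(force=True)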
@ -2,19 +2,18 @@ import contextlib
import json
import shutil
import socket
import socketserver
import tarfile
import tempfile
import threading
from http.server import SimpleHTTPRequestHandler

import pytest
from http.server import SimpleHTTPRequestHandler
import socketserver


import docker

from ..helpers import requires_api_version, requires_experimental
from .base import BaseAPIIntegrationTest, TEST_IMG
from .base import TEST_IMG, BaseAPIIntegrationTest


class ListImagesTest(BaseAPIIntegrationTest):

@ -32,7 +31,7 @@ class ListImagesTest(BaseAPIIntegrationTest):

def test_images_quiet(self):
    res1 = self.client.images(quiet=True)
    assert type(res1[0]) == str
    assert isinstance(res1[0], str)


class PullImageTest(BaseAPIIntegrationTest):

@ -43,7 +42,7 @@ class PullImageTest(BaseAPIIntegrationTest):
    pass
res = self.client.pull('hello-world')
self.tmp_imgs.append('hello-world')
assert type(res) == str
assert isinstance(res, str)
assert len(self.client.images('hello-world')) >= 1
img_info = self.client.inspect_image('hello-world')
assert 'Id' in img_info

@ -85,13 +84,8 @@ class CommitTest(BaseAPIIntegrationTest):
img_id = res['Id']
self.tmp_imgs.append(img_id)
img = self.client.inspect_image(img_id)
assert 'Container' in img
assert img['Container'].startswith(id)
assert 'ContainerConfig' in img
assert 'Image' in img['ContainerConfig']
assert TEST_IMG == img['ContainerConfig']['Image']
busybox_id = self.client.inspect_image(TEST_IMG)['Id']
assert 'Parent' in img
busybox_id = self.client.inspect_image(TEST_IMG)['Id']
assert img['Parent'] == busybox_id

def test_commit_with_changes(self):

@ -103,8 +97,6 @@ class CommitTest(BaseAPIIntegrationTest):
)
self.tmp_imgs.append(img_id)
img = self.client.inspect_image(img_id)
assert 'Container' in img
assert img['Container'].startswith(cid['Id'])
assert '8000/tcp' in img['Config']['ExposedPorts']
assert img['Config']['Cmd'] == ['bash']


@ -263,10 +255,8 @@ class ImportImageTest(BaseAPIIntegrationTest):
data = self.client.get_image(test_img)
assert data
output = self.client.load_image(data)
assert any([
    line for line in output
    if f'Loaded image: {test_img}' in line.get('stream', '')
])
assert any(line for line in output
           if f'Loaded image: {test_img}' in line.get('stream', ''))

@contextlib.contextmanager
def temporary_http_file_server(self, stream):


@ -1,9 +1,10 @@
import docker
from docker.types import IPAMConfig, IPAMPool
import pytest

import docker
from docker.types import IPAMConfig, IPAMPool

from ..helpers import random_name, requires_api_version
from .base import BaseAPIIntegrationTest, TEST_IMG
from .base import TEST_IMG, BaseAPIIntegrationTest


class TestNetworks(BaseAPIIntegrationTest):

@ -233,7 +234,7 @@ class TestNetworks(BaseAPIIntegrationTest):
net_name, net_id = self.create_network(
    ipam=IPAMConfig(
        driver='default',
        pool_configs=[IPAMPool(subnet="2001:389::1/64")],
        pool_configs=[IPAMPool(subnet="2001:389::/64")],
    ),
)
container = self.client.create_container(

@ -327,8 +328,6 @@ class TestNetworks(BaseAPIIntegrationTest):
net_name, net_id = self.create_network()
with pytest.raises(docker.errors.APIError):
    self.client.create_network(net_name, check_duplicate=True)
net_id = self.client.create_network(net_name, check_duplicate=False)
self.tmp_networks.append(net_id['Id'])

@requires_api_version('1.22')
def test_connect_with_links(self):

@ -389,7 +388,7 @@ class TestNetworks(BaseAPIIntegrationTest):
driver='default',
pool_configs=[
    IPAMPool(
        subnet="2001:389::1/64", iprange="2001:389::0/96",
        subnet="2001:389::/64", iprange="2001:389::0/96",
        gateway="2001:389::ffff"
    )
]

@ -455,7 +454,7 @@ class TestNetworks(BaseAPIIntegrationTest):
driver='default',
pool_configs=[
    IPAMPool(
        subnet="2001:389::1/64", iprange="2001:389::0/96",
        subnet="2001:389::/64", iprange="2001:389::0/96",
        gateway="2001:389::ffff"
    )
]
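The subnet fix above (2001:389::1/64 becoming 2001:389::/64) matters because IPAM expects a network address, not a host address. A hedged sketch of the corrected usage; the network name is a placeholder and the daemon must have IPv6 enabled:

import docker
from docker.types import IPAMConfig, IPAMPool

# The subnet must be a network address ('2001:389::/64'), not a host
# address ('2001:389::1/64'); the gateway is a host address within it.
client = docker.from_env()
ipam = IPAMConfig(
    driver='default',
    pool_configs=[IPAMPool(subnet='2001:389::/64',
                           iprange='2001:389::0/96',
                           gateway='2001:389::ffff')],
)
network = client.networks.create('demo-v6', driver='bridge',
                                 enable_ipv6=True, ipam=ipam)
print(network.id)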
@ -1,10 +1,11 @@
import os

import docker
import pytest

from .base import BaseAPIIntegrationTest
import docker

from ..helpers import requires_api_version
from .base import BaseAPIIntegrationTest

SSHFS = 'vieux/sshfs:latest'


@ -39,7 +40,7 @@ class PluginTest(BaseAPIIntegrationTest):
    return self.client.inspect_plugin(plugin_name)
except docker.errors.NotFound:
    prv = self.client.plugin_privileges(plugin_name)
    for d in self.client.pull_plugin(plugin_name, prv):
    for _d in self.client.pull_plugin(plugin_name, prv):
        pass
    return self.client.inspect_plugin(plugin_name)

@ -118,7 +119,7 @@ class PluginTest(BaseAPIIntegrationTest):
    pass

prv = self.client.plugin_privileges(SSHFS)
logs = [d for d in self.client.pull_plugin(SSHFS, prv)]
logs = list(self.client.pull_plugin(SSHFS, prv))
assert filter(lambda x: x['status'] == 'Download complete', logs)
assert self.client.inspect_plugin(SSHFS)
assert self.client.enable_plugin(SSHFS)

@ -128,7 +129,7 @@ class PluginTest(BaseAPIIntegrationTest):
pl_data = self.ensure_plugin_installed(SSHFS)
assert pl_data['Enabled'] is False
prv = self.client.plugin_privileges(SSHFS)
logs = [d for d in self.client.upgrade_plugin(SSHFS, SSHFS, prv)]
logs = list(self.client.upgrade_plugin(SSHFS, SSHFS, prv))
assert filter(lambda x: x['status'] == 'Download complete', logs)
assert self.client.inspect_plugin(SSHFS)
assert self.client.enable_plugin(SSHFS)


@ -1,6 +1,7 @@
import docker
import pytest

import docker

from ..helpers import force_leave_swarm, requires_api_version
from .base import BaseAPIIntegrationTest


@ -1,13 +1,12 @@
import random
import time

import docker
import pytest

from ..helpers import (
    force_leave_swarm, requires_api_version, requires_experimental
)
from .base import BaseAPIIntegrationTest, TEST_IMG
import docker

from ..helpers import force_leave_swarm, requires_api_version
from .base import TEST_IMG, BaseAPIIntegrationTest


class ServiceTest(BaseAPIIntegrationTest):

@ -85,6 +84,20 @@ class ServiceTest(BaseAPIIntegrationTest):
assert len(test_services) == 1
assert test_services[0]['Spec']['Labels']['test_label'] == 'testing'

@requires_api_version('1.41')
def test_list_services_with_status(self):
    test_services = self.client.services()
    assert len(test_services) == 0
    self.create_simple_service()
    test_services = self.client.services(
        filters={'name': 'dockerpytest_'}, status=False
    )
    assert 'ServiceStatus' not in test_services[0]
    test_services = self.client.services(
        filters={'name': 'dockerpytest_'}, status=True
    )
    assert 'ServiceStatus' in test_services[0]

def test_inspect_service_by_id(self):
    svc_name, svc_id = self.create_simple_service()
    svc_info = self.client.inspect_service(svc_id)

@ -127,8 +140,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert len(services) == 1
assert services[0]['ID'] == svc_id['ID']

@requires_api_version('1.25')
@requires_experimental(until='1.29')
@requires_api_version('1.29')
def test_service_logs(self):
    name, svc_id = self.create_simple_service()
    assert self.get_service_container(name, include_stopped=True)


@ -1,7 +1,9 @@
import copy
import docker

import pytest

import docker

from ..helpers import force_leave_swarm, requires_api_version
from .base import BaseAPIIntegrationTest

@ -127,11 +129,11 @@ class SwarmTest(BaseAPIIntegrationTest):
assert self.init_swarm()
with pytest.raises(docker.errors.APIError) as exc_info:
    self.client.leave_swarm()
exc_info.value.response.status_code == 500
assert exc_info.value.response.status_code == 503
assert self.client.leave_swarm(force=True)
with pytest.raises(docker.errors.APIError) as exc_info:
    self.client.inspect_swarm()
exc_info.value.response.status_code == 406
assert exc_info.value.response.status_code == 503
assert self.client.leave_swarm(force=True)

@requires_api_version('1.24')


@ -1,6 +1,7 @@
import docker
import pytest

import docker

from ..helpers import requires_api_version
from .base import BaseAPIIntegrationTest

@ -16,10 +17,16 @@ class TestVolumes(BaseAPIIntegrationTest):
assert result['Driver'] == 'local'

def test_create_volume_invalid_driver(self):
    driver_name = 'invalid.driver'
    # special name to avoid exponential timeout loop
    # https://github.com/moby/moby/blob/9e00a63d65434cdedc444e79a2b33a7c202b10d8/pkg/plugins/client.go#L253-L254
    driver_name = 'this-plugin-does-not-exist'

    with pytest.raises(docker.errors.NotFound):
    with pytest.raises(docker.errors.APIError) as cm:
        self.client.create_volume('perfectcherryblossom', driver_name)
    assert (
        cm.value.response.status_code == 404 or
        cm.value.response.status_code == 400
    )

def test_list_volumes(self):
    name = 'imperishablenight'


@ -3,9 +3,10 @@ import shutil
import unittest

import docker
from .. import helpers
from docker.utils import kwargs_from_env

from .. import helpers

TEST_IMG = 'alpine:3.10'
TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')


@ -103,8 +104,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
if exitcode != 0:
    output = self.client.logs(container)
    raise Exception(
        "Container exited with code {}:\n{}"
        .format(exitcode, output))
        f"Container exited with code {exitcode}:\n{output}")

    return container


@ -1,10 +1,9 @@
import threading
import unittest
from datetime import datetime, timedelta

import docker

from datetime import datetime, timedelta

from ..helpers import requires_api_version
from .base import TEST_API_VERSION


@ -1,9 +1,10 @@
import sys
import warnings

import pytest

import docker.errors
from docker.utils import kwargs_from_env
import pytest

from .base import TEST_IMG


@ -1,9 +1,12 @@
import os
import tempfile

import pytest

from docker import errors
from docker.context import ContextAPI
from docker.tls import TLSConfig

from .base import BaseAPIIntegrationTest


@ -29,7 +32,7 @@ class ContextLifecycleTest(BaseAPIIntegrationTest):
    "test", tls_cfg=docker_tls)

# check for a context 'test' in the context store
assert any([ctx.Name == "test" for ctx in ContextAPI.contexts()])
assert any(ctx.Name == "test" for ctx in ContextAPI.contexts())
# retrieve a context object for 'test'
assert ContextAPI.get_context("test")
# remove context


@ -6,8 +6,11 @@ import sys
import pytest

from docker.credentials import (
    CredentialsNotFound, Store, StoreError, DEFAULT_LINUX_STORE,
    DEFAULT_OSX_STORE
    DEFAULT_LINUX_STORE,
    DEFAULT_OSX_STORE,
    CredentialsNotFound,
    Store,
    StoreError,
)


@ -22,7 +25,7 @@ class TestStore:
def setup_method(self):
    self.tmp_keys = []
    if sys.platform.startswith('linux'):
        if shutil.which('docker-credential-' + DEFAULT_LINUX_STORE):
        if shutil.which(f"docker-credential-{DEFAULT_LINUX_STORE}"):
            self.store = Store(DEFAULT_LINUX_STORE)
        elif shutil.which('docker-credential-pass'):
            self.store = Store('pass')

@ -84,3 +87,10 @@ class TestStore:
data = self.store._execute('--null', '')
assert b'\0FOO=bar\0' in data
assert 'FOO' not in os.environ

def test_unavailable_store(self):
    some_unavailable_store = None
    with pytest.warns(UserWarning):
        some_unavailable_store = Store('that-does-not-exist')
    with pytest.raises(StoreError):
        some_unavailable_store.get('anything-this-does-not-matter')
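The setup_method above probes for a docker-credential-* helper on PATH before constructing a Store, and test_unavailable_store shows that a missing helper surfaces as a StoreError. A hedged sketch of the same lookup pattern; the helper name and registry URL are placeholders:

import shutil

from docker.credentials import Store, StoreError

# Only construct a Store when its helper binary exists, mirroring the
# probing in setup_method above; CredentialsNotFound subclasses StoreError.
if shutil.which('docker-credential-pass'):
    store = Store('pass')
    try:
        creds = store.get('https://index.docker.io/v1/')
        print(creds['Username'])
    except StoreError:
        print('no credentials stored for that registry')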
@ -1,13 +1,13 @@
import os
from unittest import mock

from docker.credentials.utils import create_environment_dict
from unittest import mock


@mock.patch.dict(os.environ)
def test_create_environment_dict():
    base = {'FOO': 'bar', 'BAZ': 'foobar'}
    os.environ = base
    os.environ = base  # noqa: B003
    assert create_environment_dict({'FOO': 'baz'}) == {
        'FOO': 'baz', 'BAZ': 'foobar',
    }