Merge pull request #2680 from aiordache/replace_paramiko

Shell out to local SSH client as alternative to a paramiko connection
This commit is contained in:
Anca Iordache 2020-10-16 11:34:00 +02:00 committed by GitHub
commit e09b070575
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 1006 additions and 61 deletions

View File

@ -2,6 +2,10 @@ ARG PYTHON_VERSION=2.7
FROM python:${PYTHON_VERSION} FROM python:${PYTHON_VERSION}
# Add SSH keys and set permissions
COPY tests/ssh-keys /root/.ssh
RUN chmod -R 600 /root/.ssh
RUN mkdir /src RUN mkdir /src
WORKDIR /src WORKDIR /src

32
Jenkinsfile vendored
View File

@ -3,6 +3,7 @@
def imageNameBase = "dockerbuildbot/docker-py" def imageNameBase = "dockerbuildbot/docker-py"
def imageNamePy2 def imageNamePy2
def imageNamePy3 def imageNamePy3
def imageDindSSH
def images = [:] def images = [:]
def buildImage = { name, buildargs, pyTag -> def buildImage = { name, buildargs, pyTag ->
@ -13,7 +14,7 @@ def buildImage = { name, buildargs, pyTag ->
img = docker.build(name, buildargs) img = docker.build(name, buildargs)
img.push() img.push()
} }
images[pyTag] = img.id if (pyTag?.trim()) images[pyTag] = img.id
} }
def buildImages = { -> def buildImages = { ->
@ -23,7 +24,9 @@ def buildImages = { ->
imageNamePy2 = "${imageNameBase}:py2-${gitCommit()}" imageNamePy2 = "${imageNameBase}:py2-${gitCommit()}"
imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}" imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}"
imageDindSSH = "${imageNameBase}:sshdind-${gitCommit()}"
buildImage(imageDindSSH, "-f tests/Dockerfile-ssh-dind .", "")
buildImage(imageNamePy2, "-f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 .", "py2.7") buildImage(imageNamePy2, "-f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 .", "py2.7")
buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7") buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7")
} }
@ -81,22 +84,37 @@ def runTests = { Map settings ->
def testNetwork = "dpy-testnet-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" def testNetwork = "dpy-testnet-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}"
try { try {
sh """docker network create ${testNetwork}""" sh """docker network create ${testNetwork}"""
sh """docker run -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\
docker:${dockerVersion}-dind dockerd -H tcp://0.0.0.0:2375 ${imageDindSSH} dockerd -H tcp://0.0.0.0:2375
""" """
sh """docker run \\ sh """docker run --rm \\
--name ${testContainerName} \\ --name ${testContainerName} \\
-e "DOCKER_HOST=tcp://${dindContainerName}:2375" \\ -e "DOCKER_HOST=tcp://${dindContainerName}:2375" \\
-e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\
--network ${testNetwork} \\ --network ${testNetwork} \\
--volumes-from ${dindContainerName} \\ --volumes-from ${dindContainerName} \\
${testImage} \\ ${testImage} \\
py.test -v -rxs --cov=docker tests/ py.test -v -rxs --cov=docker --ignore=tests/ssh tests/
"""
sh """docker stop ${dindContainerName}"""
// start DIND container with SSH
sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\
${imageDindSSH} dockerd --experimental"""
sh """docker exec ${dindContainerName} sh -c /usr/sbin/sshd """
// run SSH tests only
sh """docker run --rm \\
--name ${testContainerName} \\
-e "DOCKER_HOST=ssh://${dindContainerName}:22" \\
-e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\
--network ${testNetwork} \\
--volumes-from ${dindContainerName} \\
${testImage} \\
py.test -v -rxs --cov=docker tests/ssh
""" """
} finally { } finally {
sh """ sh """
docker stop ${dindContainerName} ${testContainerName} docker stop ${dindContainerName}
docker rm -vf ${dindContainerName} ${testContainerName}
docker network rm ${testNetwork} docker network rm ${testNetwork}
""" """
} }

View File

@ -1,3 +1,6 @@
TEST_API_VERSION ?= 1.39
TEST_ENGINE_VERSION ?= 19.03.13
.PHONY: all .PHONY: all
all: test all: test
@ -10,6 +13,10 @@ clean:
build: build:
docker build -t docker-sdk-python -f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 --build-arg APT_MIRROR . docker build -t docker-sdk-python -f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 --build-arg APT_MIRROR .
.PHONY: build-dind-ssh
build-dind-ssh:
docker build -t docker-dind-ssh -f tests/Dockerfile-ssh-dind --build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} --build-arg API_VERSION=${TEST_API_VERSION} --build-arg APT_MIRROR .
.PHONY: build-py3 .PHONY: build-py3
build-py3: build-py3:
docker build -t docker-sdk-python3 -f tests/Dockerfile --build-arg APT_MIRROR . docker build -t docker-sdk-python3 -f tests/Dockerfile --build-arg APT_MIRROR .
@ -41,9 +48,6 @@ integration-test: build
integration-test-py3: build-py3 integration-test-py3: build-py3
docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file} docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file}
TEST_API_VERSION ?= 1.39
TEST_ENGINE_VERSION ?= 19.03.12
.PHONY: setup-network .PHONY: setup-network
setup-network: setup-network:
docker network inspect dpy-tests || docker network create dpy-tests docker network inspect dpy-tests || docker network create dpy-tests
@ -69,6 +73,29 @@ integration-dind-py3: build-py3 setup-network
--network dpy-tests docker-sdk-python3 py.test tests/integration/${file} --network dpy-tests docker-sdk-python3 py.test tests/integration/${file}
docker rm -vf dpy-dind-py3 docker rm -vf dpy-dind-py3
.PHONY: integration-ssh-py2
integration-ssh-py2: build-dind-ssh build setup-network
docker rm -vf dpy-dind-py2 || :
docker run -d --network dpy-tests --name dpy-dind-py2 --privileged\
docker-dind-ssh dockerd --experimental
# start SSH daemon
docker exec dpy-dind-py2 sh -c "/usr/sbin/sshd"
docker run -t --rm --env="DOCKER_HOST=ssh://dpy-dind-py2" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
--network dpy-tests docker-sdk-python py.test tests/ssh/${file}
docker rm -vf dpy-dind-py2
.PHONY: integration-ssh-py3
integration-ssh-py3: build-dind-ssh build-py3 setup-network
docker rm -vf dpy-dind-py3 || :
docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\
docker-dind-ssh dockerd --experimental
# start SSH daemon
docker exec dpy-dind-py3 sh -c "/usr/sbin/sshd"
docker run -t --rm --env="DOCKER_HOST=ssh://dpy-dind-py3" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
--network dpy-tests docker-sdk-python3 py.test tests/ssh/${file}
docker rm -vf dpy-dind-py3
.PHONY: integration-dind-ssl .PHONY: integration-dind-ssl
integration-dind-ssl: build-dind-certs build build-py3 integration-dind-ssl: build-dind-certs build build-py3
docker rm -vf dpy-dind-certs dpy-dind-ssl || : docker rm -vf dpy-dind-certs dpy-dind-ssl || :

View File

@ -89,6 +89,9 @@ class APIClient(
user_agent (str): Set a custom user agent for requests to the server. user_agent (str): Set a custom user agent for requests to the server.
credstore_env (dict): Override environment variables when calling the credstore_env (dict): Override environment variables when calling the
credential store process. credential store process.
use_ssh_client (bool): If set to `True`, an ssh connection is made
via shelling out to the ssh client. Ensure the ssh client is
installed and configured on the host.
""" """
__attrs__ = requests.Session.__attrs__ + ['_auth_configs', __attrs__ = requests.Session.__attrs__ + ['_auth_configs',
@ -100,7 +103,7 @@ class APIClient(
def __init__(self, base_url=None, version=None, def __init__(self, base_url=None, version=None,
timeout=DEFAULT_TIMEOUT_SECONDS, tls=False, timeout=DEFAULT_TIMEOUT_SECONDS, tls=False,
user_agent=DEFAULT_USER_AGENT, num_pools=None, user_agent=DEFAULT_USER_AGENT, num_pools=None,
credstore_env=None): credstore_env=None, use_ssh_client=False):
super(APIClient, self).__init__() super(APIClient, self).__init__()
if tls and not base_url: if tls and not base_url:
@ -161,7 +164,8 @@ class APIClient(
elif base_url.startswith('ssh://'): elif base_url.startswith('ssh://'):
try: try:
self._custom_adapter = SSHHTTPAdapter( self._custom_adapter = SSHHTTPAdapter(
base_url, timeout, pool_connections=num_pools base_url, timeout, pool_connections=num_pools,
shell_out=use_ssh_client
) )
except NameError: except NameError:
raise DockerException( raise DockerException(

View File

@ -35,6 +35,9 @@ class DockerClient(object):
user_agent (str): Set a custom user agent for requests to the server. user_agent (str): Set a custom user agent for requests to the server.
credstore_env (dict): Override environment variables when calling the credstore_env (dict): Override environment variables when calling the
credential store process. credential store process.
use_ssh_client (bool): If set to `True`, an ssh connection is made
via shelling out to the ssh client. Ensure the ssh client is
installed and configured on the host.
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.api = APIClient(*args, **kwargs) self.api = APIClient(*args, **kwargs)
@ -70,6 +73,9 @@ class DockerClient(object):
from. Default: the value of ``os.environ`` from. Default: the value of ``os.environ``
credstore_env (dict): Override environment variables when calling credstore_env (dict): Override environment variables when calling
the credential store process. the credential store process.
use_ssh_client (bool): If set to `True`, an ssh connection is
made via shelling out to the ssh client. Ensure the ssh
client is installed and configured on the host.
Example: Example:
@ -81,8 +87,12 @@ class DockerClient(object):
""" """
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS) timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS)
version = kwargs.pop('version', None) version = kwargs.pop('version', None)
use_ssh_client = kwargs.pop('use_ssh_client', False)
return cls( return cls(
timeout=timeout, version=version, **kwargs_from_env(**kwargs) timeout=timeout,
version=version,
use_ssh_client=use_ssh_client,
**kwargs_from_env(**kwargs)
) )
# Resources # Resources

View File

@ -1,8 +1,11 @@
import io
import paramiko import paramiko
import requests.adapters import requests.adapters
import six import six
import logging import logging
import os import os
import socket
import subprocess
from docker.transport.basehttpadapter import BaseHTTPAdapter from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants from .. import constants
@ -20,33 +23,140 @@ except ImportError:
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
def create_paramiko_client(base_url):
    """Construct a paramiko ``SSHClient`` plus the keyword arguments for its
    ``connect()`` call, derived from an ``ssh://`` *base_url* and, when it
    exists, the user's ``~/.ssh/config``.

    URL components (port, username) take precedence over ssh_config values.

    Returns:
        tuple: ``(ssh_client, ssh_params)`` ready for ``ssh_client.connect``.
    """
    # paramiko is chatty at INFO level; keep only warnings and above.
    logging.getLogger("paramiko").setLevel(logging.WARNING)
    client = paramiko.SSHClient()
    parsed = six.moves.urllib_parse.urlparse(base_url)
    params = {
        "hostname": parsed.hostname,
        "port": parsed.port,
        "username": parsed.username
    }
    config_path = os.path.expanduser("~/.ssh/config")
    if os.path.exists(config_path):
        conf = paramiko.SSHConfig()
        with open(config_path) as cfg:
            conf.parse(cfg)
        host_config = conf.lookup(parsed.hostname)
        if 'proxycommand' in host_config:
            params["sock"] = paramiko.ProxyCommand(
                host_config['proxycommand']
            )
        if 'hostname' in host_config:
            params['hostname'] = host_config['hostname']
        if 'identityfile' in host_config:
            # NOTE(review): paramiko returns identityfile as a list;
            # SSHClient.connect accepts a list for key_filename, so it is
            # passed through unchanged.
            params['key_filename'] = host_config['identityfile']
        # Only fall back to ssh_config when the URL did not specify these.
        if parsed.port is None and 'port' in host_config:
            params['port'] = host_config['port']
        if parsed.username is None and 'user' in host_config:
            params['username'] = host_config['user']
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.WarningPolicy())
    return client, params
class SSHSocket(socket.socket):
    """Socket-like object that tunnels a Docker API connection through the
    local ``ssh`` client binary, running ``docker system dial-stdio`` on the
    remote host. The subprocess' stdin/stdout pipes act as the byte stream.

    ``host`` may be ``"hostname"`` or ``"hostname:port"``.
    """

    def __init__(self, host):
        super(SSHSocket, self).__init__(
            socket.AF_INET, socket.SOCK_STREAM)
        self.host = host
        self.port = None
        if ':' in host:
            self.host, self.port = host.split(':')
        # Populated by connect(); all I/O methods require it.
        self.proc = None

    def _check_proc(self, stream=None):
        # Fix: the original message was two adjacent string literals with no
        # separator, rendering as "initiated.connect()". Also deduplicates
        # the check previously repeated in _write()/recv().
        if not self.proc or (stream is not None and stream.closed):
            raise Exception(
                'SSH subprocess not initiated. '
                'connect() must be called first.'
            )

    def connect(self, **kwargs):
        # '-q' suppresses ssh banners/warnings that would corrupt the stream.
        port = '' if not self.port else '-p {}'.format(self.port)
        args = [
            'ssh',
            '-q',
            self.host,
            port,
            'docker system dial-stdio'
        ]
        # NOTE(review): shell=True with an interpolated host string allows
        # shell injection if `host` is ever attacker-controlled; consider
        # shell=False with an argument list.
        self.proc = subprocess.Popen(
            ' '.join(args),
            shell=True,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE)

    def _write(self, data):
        """Write *data* to the tunnel and flush; returns bytes written."""
        self._check_proc(self.proc.stdin if self.proc else None)
        written = self.proc.stdin.write(data)
        self.proc.stdin.flush()
        return written

    def sendall(self, data):
        self._write(data)

    def send(self, data):
        return self._write(data)

    def recv(self):
        self._check_proc()
        return self.proc.stdout.read()

    def makefile(self, mode):
        # Before connect() (or after stdout closes) hand back a dummy
        # buffer holding a blank-line terminator so HTTP parsing ends cleanly.
        if not self.proc or self.proc.stdout.closed:
            buf = io.BytesIO()
            buf.write(b'\n\n')
            return buf
        return self.proc.stdout

    def close(self):
        if not self.proc or self.proc.stdin.closed:
            return
        # Send a blank line to let the remote dial-stdio end gracefully,
        # then terminate the ssh subprocess.
        self.proc.stdin.write(b'\n\n')
        self.proc.stdin.flush()
        self.proc.terminate()
class SSHConnection(httplib.HTTPConnection, object): class SSHConnection(httplib.HTTPConnection, object):
def __init__(self, ssh_transport, timeout=60): def __init__(self, ssh_transport=None, timeout=60, host=None):
super(SSHConnection, self).__init__( super(SSHConnection, self).__init__(
'localhost', timeout=timeout 'localhost', timeout=timeout
) )
self.ssh_transport = ssh_transport self.ssh_transport = ssh_transport
self.timeout = timeout self.timeout = timeout
self.host = host
def connect(self): def connect(self):
sock = self.ssh_transport.open_session() if self.ssh_transport:
sock.settimeout(self.timeout) sock = self.ssh_transport.open_session()
sock.exec_command('docker system dial-stdio') sock.settimeout(self.timeout)
sock.exec_command('docker system dial-stdio')
else:
sock = SSHSocket(self.host)
sock.settimeout(self.timeout)
sock.connect()
self.sock = sock self.sock = sock
class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
scheme = 'ssh' scheme = 'ssh'
def __init__(self, ssh_client, timeout=60, maxsize=10): def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None):
super(SSHConnectionPool, self).__init__( super(SSHConnectionPool, self).__init__(
'localhost', timeout=timeout, maxsize=maxsize 'localhost', timeout=timeout, maxsize=maxsize
) )
self.ssh_transport = ssh_client.get_transport() self.ssh_transport = None
if ssh_client:
self.ssh_transport = ssh_client.get_transport()
self.timeout = timeout self.timeout = timeout
self.host = host
self.port = None
if ':' in host:
self.host, self.port = host.split(':')
def _new_conn(self): def _new_conn(self):
return SSHConnection(self.ssh_transport, self.timeout) return SSHConnection(self.ssh_transport, self.timeout, self.host)
# When re-using connections, urllib3 calls fileno() on our # When re-using connections, urllib3 calls fileno() on our
# SSH channel instance, quickly overloading our fd limit. To avoid this, # SSH channel instance, quickly overloading our fd limit. To avoid this,
@ -78,39 +188,14 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
] ]
def __init__(self, base_url, timeout=60, def __init__(self, base_url, timeout=60,
pool_connections=constants.DEFAULT_NUM_POOLS): pool_connections=constants.DEFAULT_NUM_POOLS,
logging.getLogger("paramiko").setLevel(logging.WARNING) shell_out=True):
self.ssh_client = paramiko.SSHClient() self.ssh_client = None
base_url = six.moves.urllib_parse.urlparse(base_url) if not shell_out:
self.ssh_params = { self.ssh_client, self.ssh_params = create_paramiko_client(base_url)
"hostname": base_url.hostname, self._connect()
"port": base_url.port, base_url = base_url.lstrip('ssh://')
"username": base_url.username self.host = base_url
}
ssh_config_file = os.path.expanduser("~/.ssh/config")
if os.path.exists(ssh_config_file):
conf = paramiko.SSHConfig()
with open(ssh_config_file) as f:
conf.parse(f)
host_config = conf.lookup(base_url.hostname)
self.ssh_conf = host_config
if 'proxycommand' in host_config:
self.ssh_params["sock"] = paramiko.ProxyCommand(
self.ssh_conf['proxycommand']
)
if 'hostname' in host_config:
self.ssh_params['hostname'] = host_config['hostname']
if 'identityfile' in host_config:
self.ssh_params['key_filename'] = host_config['identityfile']
if base_url.port is None and 'port' in host_config:
self.ssh_params['port'] = self.ssh_conf['port']
if base_url.username is None and 'user' in host_config:
self.ssh_params['username'] = self.ssh_conf['user']
self.ssh_client.load_system_host_keys()
self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy())
self._connect()
self.timeout = timeout self.timeout = timeout
self.pools = RecentlyUsedContainer( self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close() pool_connections, dispose_func=lambda p: p.close()
@ -118,7 +203,8 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
super(SSHHTTPAdapter, self).__init__() super(SSHHTTPAdapter, self).__init__()
def _connect(self): def _connect(self):
self.ssh_client.connect(**self.ssh_params) if self.ssh_client:
self.ssh_client.connect(**self.ssh_params)
def get_connection(self, url, proxies=None): def get_connection(self, url, proxies=None):
with self.pools.lock: with self.pools.lock:
@ -127,11 +213,13 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
return pool return pool
# Connection is closed try a reconnect # Connection is closed try a reconnect
if not self.ssh_client.get_transport(): if self.ssh_client and not self.ssh_client.get_transport():
self._connect() self._connect()
pool = SSHConnectionPool( pool = SSHConnectionPool(
self.ssh_client, self.timeout ssh_client=self.ssh_client,
timeout=self.timeout,
host=self.host
) )
self.pools[url] = pool self.pools[url] = pool
@ -139,4 +227,5 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
def close(self): def close(self):
super(SSHHTTPAdapter, self).close() super(SSHHTTPAdapter, self).close()
self.ssh_client.close() if self.ssh_client:
self.ssh_client.close()

View File

@ -6,10 +6,13 @@ ARG APT_MIRROR
RUN sed -ri "s/(httpredir|deb).debian.org/${APT_MIRROR:-deb.debian.org}/g" /etc/apt/sources.list \ RUN sed -ri "s/(httpredir|deb).debian.org/${APT_MIRROR:-deb.debian.org}/g" /etc/apt/sources.list \
&& sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list
RUN apt-get update && apt-get -y install \ RUN apt-get update && apt-get -y install --no-install-recommends \
gnupg2 \ gnupg2 \
pass \ pass
curl
# Add SSH keys and set permissions
COPY tests/ssh-keys /root/.ssh
RUN chmod -R 600 /root/.ssh
COPY ./tests/gpg-keys /gpg-keys COPY ./tests/gpg-keys /gpg-keys
RUN gpg2 --import gpg-keys/secret RUN gpg2 --import gpg-keys/secret

23
tests/Dockerfile-ssh-dind Normal file
View File

@ -0,0 +1,23 @@
# Docker-in-Docker image with an SSH server, used by docker-py's test suite
# to exercise the ssh:// transport against a real daemon.
ARG API_VERSION=1.39
ARG ENGINE_VERSION=19.03.12
FROM docker:${ENGINE_VERSION}-dind
RUN apk add --no-cache \
    openssh
# Generate the host keys (ssh-keygen -A writes them with correct permissions)
RUN ssh-keygen -A
# Relax sshd so the test client can connect without known_hosts state
RUN echo "IgnoreUserKnownHosts yes" >> /etc/ssh/sshd_config && \
    echo "PubkeyAuthentication yes" >> /etc/ssh/sshd_config && \
    echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
# Install the test public key for passwordless client connections
COPY tests/ssh-keys/authorized_keys /root/.ssh/authorized_keys
RUN chmod 600 /root/.ssh/authorized_keys
RUN echo "root:root" | chpasswd
RUN ln -s /usr/local/bin/docker /usr/bin/docker
EXPOSE 22

View File

@ -339,7 +339,6 @@ class BuildTest(BaseAPIIntegrationTest):
assert self.client.inspect_image(img_name) assert self.client.inspect_image(img_name)
ctnr = self.run_container(img_name, 'cat /hosts-file') ctnr = self.run_container(img_name, 'cat /hosts-file')
self.tmp_containers.append(ctnr)
logs = self.client.logs(ctnr) logs = self.client.logs(ctnr)
if six.PY3: if six.PY3:
logs = logs.decode('utf-8') logs = logs.decode('utf-8')

1
tests/ssh-keys/authorized_keys Executable file
View File

@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/BiXkbL9oEbE3PJv1S2p12XK5BHW3qQT5Rf+CYG0ATYyMPIVM6+IXVyf3QNxpnvPXvbPBQJCs0qHeuPwZy2Gsbt35QnmlgrczFPiXXosCD2N+wrcOQPZGuLjQyUUP2yJRVSTLpp8zk2F8w3laGIB3Jk1hUcMUExemKxQYk/L40b5rXKkarLk5awBuicjRStMrchPRHZ2n715TG+zSvf8tB/UHRXKYPqai/Je5eiH3yGUzCY4zn+uEoqAFb4V8lpIj8Rw3EXmCYVwG0vg+44QIQ2gJnIhTlcmxwkynvZn97nug4NLlGJQ+sDCnIvMapycHfGkNlBz3fFtu/ORsxPpZbTNg/9noa3Zf8OpIwvE/FHNPqDctGltwxEgQxj5fE34x0fYnF08tejAUJJCZE3YsGgNabsS4pD+kRhI83eFZvgj3Q1AeTK0V9bRM7jujcc9Rz+V9Gb5zYEHN/l8PxEVlj0OlURf9ZlknNQK8xRh597jDXTfVQKCMO/nRaWH2bq0=

3
tests/ssh-keys/config Normal file
View File

@ -0,0 +1,3 @@
Host *
StrictHostKeyChecking no
UserKnownHostsFile=/dev/null

38
tests/ssh-keys/id_rsa Normal file
View File

@ -0,0 +1,38 @@
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABlwAAAAdzc2gtcn
NhAAAAAwEAAQAAAYEAvwYl5Gy/aBGxNzyb9UtqddlyuQR1t6kE+UX/gmBtAE2MjDyFTOvi
F1cn90DcaZ7z172zwUCQrNKh3rj8GcthrG7d+UJ5pYK3MxT4l16LAg9jfsK3DkD2Rri40M
lFD9siUVUky6afM5NhfMN5WhiAdyZNYVHDFBMXpisUGJPy+NG+a1ypGqy5OWsAbonI0UrT
K3IT0R2dp+9eUxvs0r3/LQf1B0VymD6movyXuXoh98hlMwmOM5/rhKKgBW+FfJaSI/EcNx
F5gmFcBtL4PuOECENoCZyIU5XJscJMp72Z/e57oODS5RiUPrAwpyLzGqcnB3xpDZQc93xb
bvzkbMT6WW0zYP/Z6Gt2X/DqSMLxPxRzT6g3LRpbcMRIEMY+XxN+MdH2JxdPLXowFCSQmR
N2LBoDWm7EuKQ/pEYSPN3hWb4I90NQHkytFfW0TO47o3HPUc/lfRm+c2BBzf5fD8RFZY9D
pVEX/WZZJzUCvMUYefe4w1031UCgjDv50Wlh9m6tAAAFeM2kMyHNpDMhAAAAB3NzaC1yc2
EAAAGBAL8GJeRsv2gRsTc8m/VLanXZcrkEdbepBPlF/4JgbQBNjIw8hUzr4hdXJ/dA3Gme
89e9s8FAkKzSod64/BnLYaxu3flCeaWCtzMU+JdeiwIPY37Ctw5A9ka4uNDJRQ/bIlFVJM
umnzOTYXzDeVoYgHcmTWFRwxQTF6YrFBiT8vjRvmtcqRqsuTlrAG6JyNFK0ytyE9Ednafv
XlMb7NK9/y0H9QdFcpg+pqL8l7l6IffIZTMJjjOf64SioAVvhXyWkiPxHDcReYJhXAbS+D
7jhAhDaAmciFOVybHCTKe9mf3ue6Dg0uUYlD6wMKci8xqnJwd8aQ2UHPd8W2785GzE+llt
M2D/2ehrdl/w6kjC8T8Uc0+oNy0aW3DESBDGPl8TfjHR9icXTy16MBQkkJkTdiwaA1puxL
ikP6RGEjzd4Vm+CPdDUB5MrRX1tEzuO6Nxz1HP5X0ZvnNgQc3+Xw/ERWWPQ6VRF/1mWSc1
ArzFGHn3uMNdN9VAoIw7+dFpYfZurQAAAAMBAAEAAAGBAKtnotyiz+Vb6r57vh2OvEpfAd
gOrmpMWVArhSfBykz5SOIU9C+fgVIcPJpaMuz7WiX97Ku9eZP5tJGbP2sN2ejV2ovtICZp
cmV9rcp1ZRpGIKr/oS5DEDlJS1zdHQErSlHcqpWqPzQSTOmcpOk5Dxza25g1u2vp7dCG2x
NqvhySZ+ECViK/Vby1zL9jFzTlhTJ4vFtpzauA2AyPBCPdpHkNqMoLgNYncXLSYHpnos8p
m9T+AAFGwBhVrGz0Mr0mhRDnV/PgbKplKT7l+CGceb8LuWmj/vzuP5Wv6dglw3hJnT2V5p
nTBp3dJ6R006+yvr5T/Xb+ObGqFfgfenjLfHjqbJ/gZdGWt4Le84g8tmSkjJBJ2Yj3kynQ
sdfv9k7JJ4t5euoje0XW0YVN1ih5DdyO4hHDRD1lSTFYT5Gl2sCTt28qsMC12rWzFkezJo
Fhewq2Ddtg4AK6SxqH4rFQCmgOR/ci7jv9TXS9xEQxYliyN5aNymRTyXmwqBIzjNKR6QAA
AMEAxpme2upng9LS6Epa83d1gnWUilYPbpb1C8+1FgpnBv9zkjFE1vY0Vu4i9LcLGlCQ0x
PB1Z16TQlEluqiSuSA0eyaWSQBF9NyGsOCOZ63lpJs/2FRBfcbUvHhv8/g1fv/xvI+FnE+
DoAhz8V3byU8HUZer7pQY3hSxisdYdsaromxC8DSSPFQoxpxwh7WuP4c3veWkdL13h4fSN
khGr3G1XGfsZOu6V6F1i7yMU6OcwBAxzPsHqZv66sT8lE6n4xjAAAAwQDzAaVaJqZ2ROoF
loltJZUtE7o+zpoDzjOJyGYaCYTU4dHPN1aeYBjw8QfmJhdmZfJp9AeJDB/W0wzoHi2ONI
chnQ1EdbCLk9pvA7rhfVdZaxPeHwniDp2iA/wZKTRG3hav9nEzS72uXuZprCsbBvGXeR0z
iuIx5odVXG8qyuI9lDY6B/IoLg7zd+V6iw9mqWYlLLsgHiAvg32LAT4j0KoTufOqpnxqTQ
P2EguTmxDWkfQmbEHdJvbD2tLQ90zMlwMAAADBAMk88wOA1i/TibH5gm/lAtKPcNKbrHfk
7O9gdSZd2HL0fLjptpOplS89Y7muTElsRDRGiKq+7KV/sxQRNcITkxdTKu8CKnftFWHrLk
9WHWVHXbu9h8ttsKeUr9i27ojxpe5I82of8k7fJTg1LxMnGzuDZfq1BGsQnOWrY7r1Yjcd
8EtSrwOB+J/S4U+rR6kwUEFYeBkhE599P1EtHTCm8kWh368di9Q+Y/VIOa3qRx4hxuiCLI
qj4ZpdVMk2cCNcjwAAAAAB
-----END OPENSSH PRIVATE KEY-----

View File

@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/BiXkbL9oEbE3PJv1S2p12XK5BHW3qQT5Rf+CYG0ATYyMPIVM6+IXVyf3QNxpnvPXvbPBQJCs0qHeuPwZy2Gsbt35QnmlgrczFPiXXosCD2N+wrcOQPZGuLjQyUUP2yJRVSTLpp8zk2F8w3laGIB3Jk1hUcMUExemKxQYk/L40b5rXKkarLk5awBuicjRStMrchPRHZ2n715TG+zSvf8tB/UHRXKYPqai/Je5eiH3yGUzCY4zn+uEoqAFb4V8lpIj8Rw3EXmCYVwG0vg+44QIQ2gJnIhTlcmxwkynvZn97nug4NLlGJQ+sDCnIvMapycHfGkNlBz3fFtu/ORsxPpZbTNg/9noa3Zf8OpIwvE/FHNPqDctGltwxEgQxj5fE34x0fYnF08tejAUJJCZE3YsGgNabsS4pD+kRhI83eFZvgj3Q1AeTK0V9bRM7jujcc9Rz+V9Gb5zYEHN/l8PxEVlj0OlURf9ZlknNQK8xRh597jDXTfVQKCMO/nRaWH2bq0=

0
tests/ssh/__init__.py Normal file
View File

595
tests/ssh/api_build_test.py Normal file
View File

@ -0,0 +1,595 @@
import io
import os
import shutil
import tempfile
from docker import errors
from docker.utils.proxy import ProxyConfig
import pytest
import six
from .base import BaseAPIIntegrationTest, TEST_IMG
from ..helpers import random_name, requires_api_version, requires_experimental
class BuildTest(BaseAPIIntegrationTest):
    def test_build_with_proxy(self):
        """Build must inject the client's ProxyConfig as env vars in RUN steps.

        NOTE(review): the generator returned by build() is never consumed
        here — confirm the request is still issued eagerly by the client.
        """
        self.client._proxy_configs = ProxyConfig(
            ftp='a', http='b', https='c', no_proxy='d'
        )
        script = io.BytesIO('\n'.join([
            'FROM busybox',
            'RUN env | grep "FTP_PROXY=a"',
            'RUN env | grep "ftp_proxy=a"',
            'RUN env | grep "HTTP_PROXY=b"',
            'RUN env | grep "http_proxy=b"',
            'RUN env | grep "HTTPS_PROXY=c"',
            'RUN env | grep "https_proxy=c"',
            'RUN env | grep "NO_PROXY=d"',
            'RUN env | grep "no_proxy=d"',
        ]).encode('ascii'))
        self.client.build(fileobj=script, decode=True)
    def test_build_with_proxy_and_buildargs(self):
        """Explicit buildargs must override the client ProxyConfig values
        (FTP_PROXY/ftp_proxy here), while unoverridden proxies pass through."""
        self.client._proxy_configs = ProxyConfig(
            ftp='a', http='b', https='c', no_proxy='d'
        )
        script = io.BytesIO('\n'.join([
            'FROM busybox',
            'RUN env | grep "FTP_PROXY=XXX"',
            'RUN env | grep "ftp_proxy=xxx"',
            'RUN env | grep "HTTP_PROXY=b"',
            'RUN env | grep "http_proxy=b"',
            'RUN env | grep "HTTPS_PROXY=c"',
            'RUN env | grep "https_proxy=c"',
            'RUN env | grep "NO_PROXY=d"',
            'RUN env | grep "no_proxy=d"',
        ]).encode('ascii'))
        self.client.build(
            fileobj=script,
            decode=True,
            buildargs={'FTP_PROXY': 'XXX', 'ftp_proxy': 'xxx'}
        )
    def test_build_streaming(self):
        """A streamed build must yield at least one decoded progress chunk."""
        script = io.BytesIO('\n'.join([
            'FROM busybox',
            'RUN mkdir -p /tmp/test',
            'EXPOSE 8080',
            'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
            ' /tmp/silence.tar.gz'
        ]).encode('ascii'))
        stream = self.client.build(fileobj=script, decode=True)
        logs = []
        for chunk in stream:
            logs.append(chunk)
        assert len(logs) > 0
    def test_build_from_stringio(self):
        """Building from a text-mode StringIO must produce log output.

        Skipped (early return) on Python 3, where build requires bytes.
        """
        if six.PY3:
            return
        script = io.StringIO(six.text_type('\n').join([
            'FROM busybox',
            'RUN mkdir -p /tmp/test',
            'EXPOSE 8080',
            'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
            ' /tmp/silence.tar.gz'
        ]))
        stream = self.client.build(fileobj=script)
        logs = ''
        for chunk in stream:
            if six.PY3:
                chunk = chunk.decode('utf-8')
            logs += chunk
        assert logs != ''
    def test_build_with_dockerignore(self):
        """.dockerignore rules (globs, comments, '!' exceptions) must control
        which files land in the build context."""
        base_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, base_dir)
        with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
            f.write("\n".join([
                'FROM busybox',
                'ADD . /test',
            ]))
        with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
            f.write("\n".join([
                'ignored',
                'Dockerfile',
                '.dockerignore',
                '!ignored/subdir/excepted-file',
                '',  # empty line
                '#*',  # comment line — must be skipped, not treated as a glob
            ]))
        with open(os.path.join(base_dir, 'not-ignored'), 'w') as f:
            f.write("this file should not be ignored")
        # '#file.txt' looks like a comment but is a real file; must survive
        with open(os.path.join(base_dir, '#file.txt'), 'w') as f:
            f.write('this file should not be ignored')
        subdir = os.path.join(base_dir, 'ignored', 'subdir')
        os.makedirs(subdir)
        with open(os.path.join(subdir, 'file'), 'w') as f:
            f.write("this file should be ignored")
        with open(os.path.join(subdir, 'excepted-file'), 'w') as f:
            f.write("this file should not be ignored")
        tag = 'docker-py-test-build-with-dockerignore'
        stream = self.client.build(
            path=base_dir,
            tag=tag,
        )
        for chunk in stream:
            pass
        # List what actually made it into the image
        c = self.client.create_container(tag, ['find', '/test', '-type', 'f'])
        self.client.start(c)
        self.client.wait(c)
        logs = self.client.logs(c)
        if six.PY3:
            logs = logs.decode('utf-8')
        assert sorted(list(filter(None, logs.split('\n')))) == sorted([
            '/test/#file.txt',
            '/test/ignored/subdir/excepted-file',
            '/test/not-ignored'
        ])
    def test_build_with_buildargs(self):
        """A buildarg passed to build() must be substituted into the image
        config (here: USER $test -> 'OK')."""
        script = io.BytesIO('\n'.join([
            'FROM scratch',
            'ARG test',
            'USER $test'
        ]).encode('ascii'))
        stream = self.client.build(
            fileobj=script, tag='buildargs', buildargs={'test': 'OK'}
        )
        self.tmp_imgs.append('buildargs')
        for chunk in stream:
            pass
        info = self.client.inspect_image('buildargs')
        assert info['Config']['User'] == 'OK'
    @requires_api_version('1.22')
    def test_build_shmsize(self):
        """Smoke test: build() must accept the shmsize parameter (128 MiB)."""
        script = io.BytesIO('\n'.join([
            'FROM scratch',
            'CMD sh -c "echo \'Hello, World!\'"',
        ]).encode('ascii'))
        tag = 'shmsize'
        shmsize = 134217728
        stream = self.client.build(
            fileobj=script, tag=tag, shmsize=shmsize
        )
        self.tmp_imgs.append(tag)
        for chunk in stream:
            pass
        # There is currently no way to get the shmsize
        # that was used to build the image
    @requires_api_version('1.24')
    def test_build_isolation(self):
        """Smoke test: build() must accept isolation='default' without error."""
        script = io.BytesIO('\n'.join([
            'FROM scratch',
            'CMD sh -c "echo \'Deaf To All But The Song\''
        ]).encode('ascii'))
        stream = self.client.build(
            fileobj=script, tag='isolation',
            isolation='default'
        )
        for chunk in stream:
            pass
    @requires_api_version('1.23')
    def test_build_labels(self):
        """Labels passed to build() must appear in the image's Config.Labels."""
        script = io.BytesIO('\n'.join([
            'FROM scratch',
        ]).encode('ascii'))
        labels = {'test': 'OK'}
        stream = self.client.build(
            fileobj=script, tag='labels', labels=labels
        )
        self.tmp_imgs.append('labels')
        for chunk in stream:
            pass
        info = self.client.inspect_image('labels')
        assert info['Config']['Labels'] == labels
    @requires_api_version('1.25')
    def test_build_with_cache_from(self):
        """cache_from=['build1'] must reuse 3 cached layers; a nonexistent
        cache source must reuse none.

        NOTE(review): `script` is rebuilt twice without seek(0) between
        calls — verify the fileobj is still readable on the later builds.
        """
        script = io.BytesIO('\n'.join([
            'FROM busybox',
            'ENV FOO=bar',
            'RUN touch baz',
            'RUN touch bax',
        ]).encode('ascii'))
        stream = self.client.build(fileobj=script, tag='build1')
        self.tmp_imgs.append('build1')
        for chunk in stream:
            pass
        stream = self.client.build(
            fileobj=script, tag='build2', cache_from=['build1'],
            decode=True
        )
        self.tmp_imgs.append('build2')
        counter = 0
        for chunk in stream:
            if 'Using cache' in chunk.get('stream', ''):
                counter += 1
        assert counter == 3
        self.client.remove_image('build2')
        counter = 0
        stream = self.client.build(
            fileobj=script, tag='build2', cache_from=['nosuchtag'],
            decode=True
        )
        for chunk in stream:
            if 'Using cache' in chunk.get('stream', ''):
                counter += 1
        assert counter == 0
@requires_api_version('1.29')
def test_build_container_with_target(self):
script = io.BytesIO('\n'.join([
'FROM busybox as first',
'RUN mkdir -p /tmp/test',
'RUN touch /tmp/silence.tar.gz',
'FROM alpine:latest',
'WORKDIR /root/'
'COPY --from=first /tmp/silence.tar.gz .',
'ONBUILD RUN echo "This should not be in the final image"'
]).encode('ascii'))
stream = self.client.build(
fileobj=script, target='first', tag='build1'
)
self.tmp_imgs.append('build1')
for chunk in stream:
pass
info = self.client.inspect_image('build1')
assert not info['Config']['OnBuild']
    @requires_api_version('1.25')
    def test_build_with_network_mode(self):
        """network_mode must control build-time networking: a custom network
        can reach its alias, while network_mode='none' fails the ping."""
        # Set up pingable endpoint on custom network
        network = self.client.create_network(random_name())['Id']
        self.tmp_networks.append(network)
        container = self.client.create_container(TEST_IMG, 'top')
        self.tmp_containers.append(container)
        self.client.start(container)
        self.client.connect_container_to_network(
            container, network, aliases=['pingtarget.docker']
        )
        script = io.BytesIO('\n'.join([
            'FROM busybox',
            'RUN ping -c1 pingtarget.docker'
        ]).encode('ascii'))
        stream = self.client.build(
            fileobj=script, network_mode=network,
            tag='dockerpytest_customnetbuild'
        )
        self.tmp_imgs.append('dockerpytest_customnetbuild')
        for chunk in stream:
            pass
        assert self.client.inspect_image('dockerpytest_customnetbuild')
        script.seek(0)
        stream = self.client.build(
            fileobj=script, network_mode='none',
            tag='dockerpytest_nonebuild', nocache=True, decode=True
        )
        self.tmp_imgs.append('dockerpytest_nonebuild')
        logs = [chunk for chunk in stream]
        # The ping step must fail (exit code 1) and no image is produced
        assert 'errorDetail' in logs[-1]
        assert logs[-1]['errorDetail']['code'] == 1
        with pytest.raises(errors.NotFound):
            self.client.inspect_image('dockerpytest_nonebuild')
    @requires_api_version('1.27')
    def test_build_with_extra_hosts(self):
        """extra_hosts entries must be resolvable during RUN steps and must
        be written into the build container's /etc/hosts."""
        img_name = 'dockerpytest_extrahost_build'
        self.tmp_imgs.append(img_name)
        script = io.BytesIO('\n'.join([
            'FROM busybox',
            'RUN ping -c1 hello.world.test',
            'RUN ping -c1 extrahost.local.test',
            'RUN cp /etc/hosts /hosts-file'
        ]).encode('ascii'))
        stream = self.client.build(
            fileobj=script, tag=img_name,
            extra_hosts={
                'extrahost.local.test': '127.0.0.1',
                'hello.world.test': '127.0.0.1',
            }, decode=True
        )
        for chunk in stream:
            if 'errorDetail' in chunk:
                pytest.fail(chunk)
        assert self.client.inspect_image(img_name)
        ctnr = self.run_container(img_name, 'cat /hosts-file')
        logs = self.client.logs(ctnr)
        if six.PY3:
            logs = logs.decode('utf-8')
        assert '127.0.0.1\textrahost.local.test' in logs
        assert '127.0.0.1\thello.world.test' in logs
@requires_experimental(until=None)
@requires_api_version('1.25')
def test_build_squash(self):
script = io.BytesIO('\n'.join([
'FROM busybox',
'RUN echo blah > /file_1',
'RUN echo blahblah > /file_2',
'RUN echo blahblahblah > /file_3'
]).encode('ascii'))
def build_squashed(squash):
tag = 'squash' if squash else 'nosquash'
stream = self.client.build(
fileobj=script, tag=tag, squash=squash
)
self.tmp_imgs.append(tag)
for chunk in stream:
pass
return self.client.inspect_image(tag)
non_squashed = build_squashed(False)
squashed = build_squashed(True)
assert len(non_squashed['RootFS']['Layers']) == 4
assert len(squashed['RootFS']['Layers']) == 2
def test_build_stderr_data(self):
control_chars = ['\x1b[91m', '\x1b[0m']
snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)'
script = io.BytesIO(b'\n'.join([
b'FROM busybox',
'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8')
]))
stream = self.client.build(
fileobj=script, decode=True, nocache=True
)
lines = []
for chunk in stream:
lines.append(chunk.get('stream'))
expected = '{0}{2}\n{1}'.format(
control_chars[0], control_chars[1], snippet
)
assert any([line == expected for line in lines])
def test_build_gzip_encoding(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
f.write("\n".join([
'FROM busybox',
'ADD . /test',
]))
stream = self.client.build(
path=base_dir, decode=True, nocache=True,
gzip=True
)
lines = []
for chunk in stream:
lines.append(chunk)
assert 'Successfully built' in lines[-1]['stream']
def test_build_with_dockerfile_empty_lines(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
f.write('FROM busybox\n')
with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
f.write('\n'.join([
' ',
'',
'\t\t',
'\t ',
]))
stream = self.client.build(
path=base_dir, decode=True, nocache=True
)
lines = []
for chunk in stream:
lines.append(chunk)
assert 'Successfully built' in lines[-1]['stream']
    def test_build_gzip_custom_encoding(self):
        # gzip=True and an explicit ``encoding`` are mutually exclusive; the
        # client must reject the combination before contacting the daemon.
        with pytest.raises(errors.DockerException):
            self.client.build(path='.', gzip=True, encoding='text/html')
    @requires_api_version('1.32')
    @requires_experimental(until=None)
    def test_build_invalid_platform(self):
        """Requesting an unknown build platform must raise ``APIError``."""
        script = io.BytesIO('FROM busybox\n'.encode('ascii'))
        with pytest.raises(errors.APIError) as excinfo:
            stream = self.client.build(fileobj=script, platform='foobar')
            # The error may only surface while consuming the stream, so the
            # drain loop stays inside the raises() block.
            for _ in stream:
                pass
        # Some API versions incorrectly returns 500 status; assert 4xx or 5xx
        assert excinfo.value.is_error()
        assert 'unknown operating system' in excinfo.exconly() \
            or 'invalid platform' in excinfo.exconly()
def test_build_out_of_context_dockerfile(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
f.write('hello world')
with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
f.write('.dockerignore\n')
df_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, df_dir)
df_name = os.path.join(df_dir, 'Dockerfile')
with open(df_name, 'wb') as df:
df.write(('\n'.join([
'FROM busybox',
'COPY . /src',
'WORKDIR /src',
])).encode('utf-8'))
df.flush()
img_name = random_name()
self.tmp_imgs.append(img_name)
stream = self.client.build(
path=base_dir, dockerfile=df_name, tag=img_name,
decode=True
)
lines = []
for chunk in stream:
lines.append(chunk)
assert 'Successfully tagged' in lines[-1]['stream']
ctnr = self.client.create_container(img_name, 'ls -a')
self.tmp_containers.append(ctnr)
self.client.start(ctnr)
lsdata = self.client.logs(ctnr).strip().split(b'\n')
assert len(lsdata) == 3
assert sorted([b'.', b'..', b'file.txt']) == sorted(lsdata)
def test_build_in_context_dockerfile(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
f.write('hello world')
with open(os.path.join(base_dir, 'custom.dockerfile'), 'w') as df:
df.write('\n'.join([
'FROM busybox',
'COPY . /src',
'WORKDIR /src',
]))
img_name = random_name()
self.tmp_imgs.append(img_name)
stream = self.client.build(
path=base_dir, dockerfile='custom.dockerfile', tag=img_name,
decode=True
)
lines = []
for chunk in stream:
lines.append(chunk)
assert 'Successfully tagged' in lines[-1]['stream']
ctnr = self.client.create_container(img_name, 'ls -a')
self.tmp_containers.append(ctnr)
self.client.start(ctnr)
lsdata = self.client.logs(ctnr).strip().split(b'\n')
assert len(lsdata) == 4
assert sorted(
[b'.', b'..', b'file.txt', b'custom.dockerfile']
) == sorted(lsdata)
def test_build_in_context_nested_dockerfile(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
f.write('hello world')
subdir = os.path.join(base_dir, 'hello', 'world')
os.makedirs(subdir)
with open(os.path.join(subdir, 'custom.dockerfile'), 'w') as df:
df.write('\n'.join([
'FROM busybox',
'COPY . /src',
'WORKDIR /src',
]))
img_name = random_name()
self.tmp_imgs.append(img_name)
stream = self.client.build(
path=base_dir, dockerfile='hello/world/custom.dockerfile',
tag=img_name, decode=True
)
lines = []
for chunk in stream:
lines.append(chunk)
assert 'Successfully tagged' in lines[-1]['stream']
ctnr = self.client.create_container(img_name, 'ls -a')
self.tmp_containers.append(ctnr)
self.client.start(ctnr)
lsdata = self.client.logs(ctnr).strip().split(b'\n')
assert len(lsdata) == 4
assert sorted(
[b'.', b'..', b'file.txt', b'hello']
) == sorted(lsdata)
def test_build_in_context_abs_dockerfile(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
abs_dockerfile_path = os.path.join(base_dir, 'custom.dockerfile')
with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
f.write('hello world')
with open(abs_dockerfile_path, 'w') as df:
df.write('\n'.join([
'FROM busybox',
'COPY . /src',
'WORKDIR /src',
]))
img_name = random_name()
self.tmp_imgs.append(img_name)
stream = self.client.build(
path=base_dir, dockerfile=abs_dockerfile_path, tag=img_name,
decode=True
)
lines = []
for chunk in stream:
lines.append(chunk)
assert 'Successfully tagged' in lines[-1]['stream']
ctnr = self.client.create_container(img_name, 'ls -a')
self.tmp_containers.append(ctnr)
self.client.start(ctnr)
lsdata = self.client.logs(ctnr).strip().split(b'\n')
assert len(lsdata) == 4
assert sorted(
[b'.', b'..', b'file.txt', b'custom.dockerfile']
) == sorted(lsdata)
    @requires_api_version('1.31')
    @pytest.mark.xfail(
        True,
        reason='Currently fails on 18.09: '
               'https://github.com/moby/moby/issues/37920'
    )
    def test_prune_builds(self):
        """``prune_builds`` reports the amount of reclaimed disk space."""
        prune_result = self.client.prune_builds()
        assert 'SpaceReclaimed' in prune_result
        assert isinstance(prune_result['SpaceReclaimed'], int)

130
tests/ssh/base.py Normal file
View File

@ -0,0 +1,130 @@
import os
import shutil
import unittest
import docker
from .. import helpers
from docker.utils import kwargs_from_env
TEST_IMG = 'alpine:3.10'
TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')
class BaseIntegrationTest(unittest.TestCase):
    """
    A base class for integration test cases. It cleans up the Docker server
    after itself.
    """

    def setUp(self):
        # Registries of resources created during a test; tearDown removes
        # them in this order.
        self.tmp_imgs = []
        self.tmp_containers = []
        self.tmp_folders = []
        self.tmp_volumes = []
        self.tmp_networks = []
        self.tmp_plugins = []
        self.tmp_secrets = []
        self.tmp_configs = []

    def tearDown(self):
        client = docker.from_env(version=TEST_API_VERSION, use_ssh_client=True)
        try:
            # Best-effort removal, in the same order as the registries above.
            # (tmp_plugins is tracked but not removed here.)
            cleanups = [
                (self.tmp_imgs, client.api.remove_image, {}),
                (self.tmp_containers, client.api.remove_container,
                 {'force': True, 'v': True}),
                (self.tmp_networks, client.api.remove_network, {}),
                (self.tmp_volumes, client.api.remove_volume, {}),
                (self.tmp_secrets, client.api.remove_secret, {}),
                (self.tmp_configs, client.api.remove_config, {}),
            ]
            for resources, remove, kwargs in cleanups:
                for resource in resources:
                    try:
                        remove(resource, **kwargs)
                    except docker.errors.APIError:
                        pass
            for folder in self.tmp_folders:
                shutil.rmtree(folder)
        finally:
            client.close()
class BaseAPIIntegrationTest(BaseIntegrationTest):
    """
    A test case for `APIClient` integration tests. It sets up an `APIClient`
    as `self.client`.
    """

    @classmethod
    def setUpClass(cls):
        cls.client = cls.get_client_instance()
        cls.client.pull(TEST_IMG)

    def tearDown(self):
        super(BaseAPIIntegrationTest, self).tearDown()
        self.client.close()

    @staticmethod
    def get_client_instance():
        # Shell out to the local `ssh` binary instead of using paramiko.
        return docker.APIClient(
            version=TEST_API_VERSION,
            timeout=60,
            use_ssh_client=True,
            **kwargs_from_env()
        )

    @staticmethod
    def _init_swarm(client, **kwargs):
        # Single-node swarm listening on a per-test address.
        return client.init_swarm(
            '127.0.0.1', listen_addr=helpers.swarm_listen_addr(), **kwargs
        )

    def run_container(self, *args, **kwargs):
        """Create and start a container, wait for it to exit, and raise if
        the exit status is non-zero. Returns the container handle."""
        ctnr = self.client.create_container(*args, **kwargs)
        self.tmp_containers.append(ctnr)
        self.client.start(ctnr)
        status = self.client.wait(ctnr)['StatusCode']
        if status != 0:
            raise Exception(
                "Container exited with code {}:\n{}"
                .format(status, self.client.logs(ctnr)))
        return ctnr

    def create_and_start(self, image=TEST_IMG, command='top', **kwargs):
        """Create and start a container without waiting; return its handle."""
        ctnr = self.client.create_container(
            image=image, command=command, **kwargs)
        self.tmp_containers.append(ctnr)
        self.client.start(ctnr)
        return ctnr

    def execute(self, container, cmd, exit_code=0, **kwargs):
        """Exec ``cmd`` inside ``container`` and assert its exit code."""
        exec_id = self.client.exec_create(container, cmd, **kwargs)
        output = self.client.exec_start(exec_id)
        returned = self.client.exec_inspect(exec_id)['ExitCode']
        msg = "Expected `{}` to exit with code {} but returned {}:\n{}".format(
            " ".join(cmd), exit_code, returned, output)
        assert returned == exit_code, msg

    def init_swarm(self, **kwargs):
        return self._init_swarm(self.client, **kwargs)