mirror of https://github.com/docker/docker-py.git

Add SSH tests

Signed-off-by: aiordache <anca.iordache@docker.com>

commit b72988bc87 (parent dd8b9b7f10)
@@ -13,3 +13,11 @@ RUN pip install -r test-requirements.txt
 
 COPY . /src
 RUN pip install .
+
+# install SSHD
+RUN apt-get install -y openssh-client
+
+# Add the keys and set permissions
+COPY ./tests/ssh-keys /root/.ssh
+RUN chmod 600 /root/.ssh/id_rsa && \
+    chmod 600 /root/.ssh/id_rsa.pub

@@ -13,3 +13,10 @@ RUN pip install -r test-requirements.txt
 
 COPY . /src
 RUN pip install .
+
+# install SSHD
+RUN apt-get install -y openssh-client
+# Add the keys and set permissions
+COPY tests/ssh-keys /root/.ssh
+RUN chmod 600 /root/.ssh/id_rsa && \
+    chmod 600 /root/.ssh/id_rsa.pub

Makefile (33 changed lines)

@@ -1,3 +1,6 @@
+TEST_API_VERSION ?= 1.39
+TEST_ENGINE_VERSION ?= 19.03.12
+
 .PHONY: all
 all: test
 
@@ -10,6 +13,10 @@ clean:
 build:
 	docker build -t docker-sdk-python -f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 --build-arg APT_MIRROR .
 
+.PHONY: build-dind-ssh
+build-dind-ssh:
+	docker build -t docker-dind-ssh -f tests/Dockerfile-ssh-dind --build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} --build-arg API_VERSION=${TEST_API_VERSION} --build-arg APT_MIRROR .
+
 .PHONY: build-py3
 build-py3:
 	docker build -t docker-sdk-python3 -f tests/Dockerfile --build-arg APT_MIRROR .
@@ -41,9 +48,6 @@ integration-test: build
 integration-test-py3: build-py3
 	docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file}
 
-TEST_API_VERSION ?= 1.39
-TEST_ENGINE_VERSION ?= 19.03.12
-
 .PHONY: setup-network
 setup-network:
 	docker network inspect dpy-tests || docker network create dpy-tests
@@ -69,6 +73,29 @@ integration-dind-py3: build-py3 setup-network
 		--network dpy-tests docker-sdk-python3 py.test tests/integration/${file}
 	docker rm -vf dpy-dind-py3
 
+.PHONY: integration-ssh-py2
+integration-ssh-py2: build-dind-ssh build setup-network
+	docker rm -vf dpy-dind-py2 || :
+	docker run -d --network dpy-tests --name dpy-dind-py2 --privileged\
+		docker-dind-ssh dockerd --experimental
+	# start SSH daemon
+	docker exec dpy-dind-py2 sh -c "/usr/sbin/sshd -D -o ListenAddress=0.0.0.0 &"
+	docker run -t --rm --env="DOCKER_HOST=ssh://dpy-dind-py2" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
+		--network dpy-tests docker-sdk-python py.test tests/ssh/${file}
+	docker rm -vf dpy-dind-py2
+
+.PHONY: integration-ssh-py3
+integration-ssh-py3: build-dind-ssh build-py3 setup-network
+	docker rm -vf dpy-dind-py3 || :
+	docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\
+		docker-dind-ssh dockerd --experimental
+	# start SSH daemon
+	docker exec dpy-dind-py3 sh -c "/usr/sbin/sshd -D -o ListenAddress=0.0.0.0 &"
+	docker run -t --rm --env="DOCKER_HOST=ssh://dpy-dind-py3" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\
+		--network dpy-tests docker-sdk-python3 py.test tests/ssh/${file}
+	docker rm -vf dpy-dind-py3
+
+
 .PHONY: integration-dind-ssl
 integration-dind-ssl: build-dind-certs build build-py3
 	docker rm -vf dpy-dind-certs dpy-dind-ssl || :
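The new integration-ssh targets exercise the client against a Docker-in-Docker daemon reached over ssh:// instead of a mounted socket: the Makefile builds the docker-dind-ssh image, starts dockerd and sshd inside it, and points the test container at it via DOCKER_HOST. The Python sketch below only illustrates what the test container then does; the container name and environment variables come from the targets above, while the fallback values are assumptions.

    # Illustrative sketch of what runs inside the test container once the
    # integration-ssh-py3 target has started the docker-dind-ssh daemon.
    import os

    import docker

    client = docker.DockerClient(
        base_url=os.environ.get('DOCKER_HOST', 'ssh://dpy-dind-py3'),
        version=os.environ.get('DOCKER_TEST_API_VERSION', '1.39'),
        use_ssh_client=True,  # shell out to the ssh binary (flag added below)
    )
    print(client.version()['ApiVersion'])
    client.close()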
@@ -11,7 +11,7 @@ from .. import auth
 from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH,
                          DEFAULT_TIMEOUT_SECONDS, DEFAULT_USER_AGENT,
                          IS_WINDOWS_PLATFORM, MINIMUM_DOCKER_API_VERSION,
-                         STREAM_HEADER_SIZE_BYTES, DEFAULT_SSH_CLIENT)
+                         STREAM_HEADER_SIZE_BYTES)
 from ..errors import (DockerException, InvalidVersion, TLSParameterError,
                       create_api_error_from_http_exception)
 from ..tls import TLSConfig
@@ -89,6 +89,9 @@ class APIClient(
         user_agent (str): Set a custom user agent for requests to the server.
         credstore_env (dict): Override environment variables when calling the
             credential store process.
+        use_ssh_client (bool): If set to `True`, an ssh connection is made
+            via shelling out to the ssh client. Ensure the ssh client is
+            installed and configured on the host.
     """
 
     __attrs__ = requests.Session.__attrs__ + ['_auth_configs',
@@ -100,7 +103,7 @@ class APIClient(
     def __init__(self, base_url=None, version=None,
                  timeout=DEFAULT_TIMEOUT_SECONDS, tls=False,
                  user_agent=DEFAULT_USER_AGENT, num_pools=None,
-                 credstore_env=None):
+                 credstore_env=None, use_ssh_client=False):
         super(APIClient, self).__init__()
 
         if tls and not base_url:
@@ -162,7 +165,7 @@ class APIClient(
             try:
                 self._custom_adapter = SSHHTTPAdapter(
                     base_url, timeout, pool_connections=num_pools,
-                    shell_out=DEFAULT_SSH_CLIENT
+                    shell_out=use_ssh_client
                 )
             except NameError:
                 raise DockerException(
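The APIClient changes above make shelling out to ssh an opt-in, per-client keyword argument rather than a behaviour forced by the DEFAULT_SSH_CLIENT constant. A minimal usage sketch of the low-level client with the new flag; the host, user, and port are placeholders, not values from this commit:

    import docker

    api = docker.APIClient(
        base_url='ssh://user@remote-host:22',  # placeholder SSH endpoint
        use_ssh_client=True,  # use `ssh ... docker system dial-stdio` under the hood
    )
    print(api.version())
    api.close()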
@@ -35,6 +35,9 @@ class DockerClient(object):
         user_agent (str): Set a custom user agent for requests to the server.
         credstore_env (dict): Override environment variables when calling the
             credential store process.
+        use_ssh_client (bool): If set to `True`, an ssh connection is made
+            via shelling out to the ssh client. Ensure the ssh client is
+            installed and configured on the host.
     """
     def __init__(self, *args, **kwargs):
         self.api = APIClient(*args, **kwargs)
@@ -70,6 +73,9 @@ class DockerClient(object):
                 from. Default: the value of ``os.environ``
             credstore_env (dict): Override environment variables when calling
                 the credential store process.
+            use_ssh_client (bool): If set to `True`, an ssh connection is
+                made via shelling out to the ssh client. Ensure the ssh
+                client is installed and configured on the host.
 
         Example:
 
@@ -81,8 +87,12 @@ class DockerClient(object):
         """
         timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS)
         version = kwargs.pop('version', None)
+        use_ssh_client = kwargs.pop('use_ssh_client', False)
         return cls(
-            timeout=timeout, version=version, **kwargs_from_env(**kwargs)
+            timeout=timeout,
+            version=version,
+            use_ssh_client=use_ssh_client,
+            **kwargs_from_env(**kwargs)
         )
 
     # Resources
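DockerClient.from_env now forwards the same flag, so the high-level client can be switched to the shell-out transport without constructing a base_url by hand. A short sketch, assuming DOCKER_HOST points at an SSH endpoint (the value below is a placeholder):

    import os

    import docker

    os.environ['DOCKER_HOST'] = 'ssh://user@remote-host'  # placeholder
    client = docker.from_env(use_ssh_client=True)
    for image in client.images.list():
        print(image.tags)
    client.close()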
@@ -40,5 +40,3 @@ DEFAULT_DATA_CHUNK_SIZE = 1024 * 2048
 
 DEFAULT_SWARM_ADDR_POOL = ['10.0.0.0/8']
 DEFAULT_SWARM_SUBNET_SIZE = 24
-
-DEFAULT_SSH_CLIENT = True
@@ -1,3 +1,4 @@
+import io
 import paramiko
 import requests.adapters
 import six
@@ -53,7 +54,7 @@ def create_paramiko_client(base_url):
 
     ssh_client.load_system_host_keys()
     ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy())
-    return ssh_client
+    return ssh_client, ssh_params
 
 
 class SSHSocket(socket.socket):
@@ -61,12 +62,18 @@ class SSHSocket(socket.socket):
         super(SSHSocket, self).__init__(
             socket.AF_INET, socket.SOCK_STREAM)
         self.host = host
+        self.port = None
+        if ':' in host:
+            self.host, self.port = host.split(':')
         self.proc = None
 
     def connect(self, **kwargs):
+        port = '' if not self.port else '-p {}'.format(self.port)
         args = [
             'ssh',
+            '-q',
             self.host,
+            port,
             'docker system dial-stdio'
         ]
         self.proc = subprocess.Popen(
@@ -82,23 +89,36 @@ class SSHSocket(socket.socket):
         self.proc.stdin.write(msg)
         self.proc.stdin.flush()
 
+    def send(self, msg):
+        if not self.proc or self.proc.stdin.closed:
+            raise Exception('SSH subprocess not initiated.'
+                            'connect() must be called first.')
+        self.proc.stdin.write(msg)
+        self.proc.stdin.flush()
+        return len(msg)
+
+
     def recv(self):
         if not self.proc:
             raise Exception('SSH subprocess not initiated.'
                             'connect() must be called first.')
-        return self.proc.stdout.read()
+        response = self.proc.stdout.read()
+        return response
 
     def makefile(self, mode):
+        if not self.proc or self.proc.stdout.closed:
+            buf = io.BytesIO()
+            buf.write(b'\n\n')
+            return buf
         return self.proc.stdout
 
     def close(self):
-        if not self.proc:
+        if not self.proc or self.proc.stdin.closed:
             return
         self.proc.stdin.write(b'\n\n')
         self.proc.stdin.flush()
         self.proc.terminate()
 
-
 class SSHConnection(httplib.HTTPConnection, object):
     def __init__(self, ssh_transport=None, timeout=60, host=None):
         super(SSHConnection, self).__init__(
@@ -134,8 +154,13 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
         self.timeout = timeout
         self.host = host
 
+        # self.base_url = six.moves.urllib_parse.urlparse(host)
+        self.port = None
+        if ':' in host:
+            self.host, self.port = host.split(':')
+
     def _new_conn(self):
-        return SSHConnection(self.ssh_transport, self.timeout)
+        return SSHConnection(self.ssh_transport, self.timeout, self.host)
 
     # When re-using connections, urllib3 calls fileno() on our
     # SSH channel instance, quickly overloading our fd limit. To avoid this,
@@ -171,9 +196,9 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
                  shell_out=True):
         self.ssh_client = None
         if not shell_out:
-            self.ssh_client = create_paramiko_client(base_url)
+            self.ssh_client, self.ssh_params = create_paramiko_client(base_url)
             self._connect()
-
+        base_url = base_url.lstrip('ssh://')
         self.host = base_url
         self.timeout = timeout
         self.pools = RecentlyUsedContainer(
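For reference, the transport added above ultimately rides on `docker system dial-stdio`: the local ssh process connects its stdio to the remote daemon socket, and plain HTTP is written over that pipe. The standalone sketch below is not library code; it only shows the idea, with a placeholder host name and the assumption that the docker CLI and an authorized key exist on the remote side.

    import subprocess

    # Bridge stdin/stdout to the remote Docker socket via ssh.
    proc = subprocess.Popen(
        ['ssh', '-q', 'remote-host', 'docker system dial-stdio'],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )
    # Any HTTP request written to stdin reaches the remote Docker API.
    proc.stdin.write(b'GET /_ping HTTP/1.1\r\nHost: docker\r\n\r\n')
    proc.stdin.flush()
    print(proc.stdout.readline())  # expect something like b'HTTP/1.1 200 OK\r\n'
    proc.terminate()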
@@ -32,3 +32,11 @@ RUN pip install -r test-requirements.txt
 
 COPY . /src
 RUN pip install .
+
+# install SSHD
+RUN apt-get install -y openssh-client
+
+# Add the keys and set permissions
+COPY tests/ssh-keys /root/.ssh
+RUN chmod 600 /root/.ssh/id_rsa && \
+    chmod 600 /root/.ssh/id_rsa.pub
@@ -0,0 +1,26 @@
+ARG API_VERSION=1.39
+ARG ENGINE_VERSION=19.03.12
+
+FROM docker:${ENGINE_VERSION}-dind
+
+RUN apk add --no-cache \
+		openssh
+
+# Add the keys and set permissions
+RUN ssh-keygen -A
+
+# copy the test SSH config
+# Add the keys and set permissions
+COPY ./tests/ssh-keys/authorized_keys /root/.ssh/authorized_keys
+RUN chmod 600 /root/.ssh/authorized_keys
+
+RUN echo "IgnoreUserKnownHosts yes" >> /etc/ssh/sshd_config && \
+  echo "PubkeyAuthentication yes" >> /etc/ssh/sshd_config && \
+  echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
+
+RUN echo "root:root" | chpasswd
+RUN ln -s /usr/local/bin/docker /usr/bin/docker
+EXPOSE 22
+
+# CMD /usr/sbin/sshd -D -o ListenAddress=0.0.0.0 & && \
+#  dockerd --experimental
@@ -339,7 +339,6 @@ class BuildTest(BaseAPIIntegrationTest):
 
         assert self.client.inspect_image(img_name)
         ctnr = self.run_container(img_name, 'cat /hosts-file')
-        self.tmp_containers.append(ctnr)
         logs = self.client.logs(ctnr)
         if six.PY3:
             logs = logs.decode('utf-8')
|  | @ -0,0 +1 @@ | ||||||
|  | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/BiXkbL9oEbE3PJv1S2p12XK5BHW3qQT5Rf+CYG0ATYyMPIVM6+IXVyf3QNxpnvPXvbPBQJCs0qHeuPwZy2Gsbt35QnmlgrczFPiXXosCD2N+wrcOQPZGuLjQyUUP2yJRVSTLpp8zk2F8w3laGIB3Jk1hUcMUExemKxQYk/L40b5rXKkarLk5awBuicjRStMrchPRHZ2n715TG+zSvf8tB/UHRXKYPqai/Je5eiH3yGUzCY4zn+uEoqAFb4V8lpIj8Rw3EXmCYVwG0vg+44QIQ2gJnIhTlcmxwkynvZn97nug4NLlGJQ+sDCnIvMapycHfGkNlBz3fFtu/ORsxPpZbTNg/9noa3Zf8OpIwvE/FHNPqDctGltwxEgQxj5fE34x0fYnF08tejAUJJCZE3YsGgNabsS4pD+kRhI83eFZvgj3Q1AeTK0V9bRM7jujcc9Rz+V9Gb5zYEHN/l8PxEVlj0OlURf9ZlknNQK8xRh597jDXTfVQKCMO/nRaWH2bq0=  | ||||||
|  | @ -0,0 +1,3 @@ | ||||||
|  | Host * | ||||||
|  | 	StrictHostKeyChecking no | ||||||
|  | 	UserKnownHostsFile=/dev/null | ||||||
|  | @ -0,0 +1,38 @@ | ||||||
|  | -----BEGIN OPENSSH PRIVATE KEY----- | ||||||
|  | b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABlwAAAAdzc2gtcn | ||||||
|  | NhAAAAAwEAAQAAAYEAvwYl5Gy/aBGxNzyb9UtqddlyuQR1t6kE+UX/gmBtAE2MjDyFTOvi | ||||||
|  | F1cn90DcaZ7z172zwUCQrNKh3rj8GcthrG7d+UJ5pYK3MxT4l16LAg9jfsK3DkD2Rri40M | ||||||
|  | lFD9siUVUky6afM5NhfMN5WhiAdyZNYVHDFBMXpisUGJPy+NG+a1ypGqy5OWsAbonI0UrT | ||||||
|  | K3IT0R2dp+9eUxvs0r3/LQf1B0VymD6movyXuXoh98hlMwmOM5/rhKKgBW+FfJaSI/EcNx | ||||||
|  | F5gmFcBtL4PuOECENoCZyIU5XJscJMp72Z/e57oODS5RiUPrAwpyLzGqcnB3xpDZQc93xb | ||||||
|  | bvzkbMT6WW0zYP/Z6Gt2X/DqSMLxPxRzT6g3LRpbcMRIEMY+XxN+MdH2JxdPLXowFCSQmR | ||||||
|  | N2LBoDWm7EuKQ/pEYSPN3hWb4I90NQHkytFfW0TO47o3HPUc/lfRm+c2BBzf5fD8RFZY9D | ||||||
|  | pVEX/WZZJzUCvMUYefe4w1031UCgjDv50Wlh9m6tAAAFeM2kMyHNpDMhAAAAB3NzaC1yc2 | ||||||
|  | EAAAGBAL8GJeRsv2gRsTc8m/VLanXZcrkEdbepBPlF/4JgbQBNjIw8hUzr4hdXJ/dA3Gme | ||||||
|  | 89e9s8FAkKzSod64/BnLYaxu3flCeaWCtzMU+JdeiwIPY37Ctw5A9ka4uNDJRQ/bIlFVJM | ||||||
|  | umnzOTYXzDeVoYgHcmTWFRwxQTF6YrFBiT8vjRvmtcqRqsuTlrAG6JyNFK0ytyE9Ednafv | ||||||
|  | XlMb7NK9/y0H9QdFcpg+pqL8l7l6IffIZTMJjjOf64SioAVvhXyWkiPxHDcReYJhXAbS+D | ||||||
|  | 7jhAhDaAmciFOVybHCTKe9mf3ue6Dg0uUYlD6wMKci8xqnJwd8aQ2UHPd8W2785GzE+llt | ||||||
|  | M2D/2ehrdl/w6kjC8T8Uc0+oNy0aW3DESBDGPl8TfjHR9icXTy16MBQkkJkTdiwaA1puxL | ||||||
|  | ikP6RGEjzd4Vm+CPdDUB5MrRX1tEzuO6Nxz1HP5X0ZvnNgQc3+Xw/ERWWPQ6VRF/1mWSc1 | ||||||
|  | ArzFGHn3uMNdN9VAoIw7+dFpYfZurQAAAAMBAAEAAAGBAKtnotyiz+Vb6r57vh2OvEpfAd | ||||||
|  | gOrmpMWVArhSfBykz5SOIU9C+fgVIcPJpaMuz7WiX97Ku9eZP5tJGbP2sN2ejV2ovtICZp | ||||||
|  | cmV9rcp1ZRpGIKr/oS5DEDlJS1zdHQErSlHcqpWqPzQSTOmcpOk5Dxza25g1u2vp7dCG2x | ||||||
|  | NqvhySZ+ECViK/Vby1zL9jFzTlhTJ4vFtpzauA2AyPBCPdpHkNqMoLgNYncXLSYHpnos8p | ||||||
|  | m9T+AAFGwBhVrGz0Mr0mhRDnV/PgbKplKT7l+CGceb8LuWmj/vzuP5Wv6dglw3hJnT2V5p | ||||||
|  | nTBp3dJ6R006+yvr5T/Xb+ObGqFfgfenjLfHjqbJ/gZdGWt4Le84g8tmSkjJBJ2Yj3kynQ | ||||||
|  | sdfv9k7JJ4t5euoje0XW0YVN1ih5DdyO4hHDRD1lSTFYT5Gl2sCTt28qsMC12rWzFkezJo | ||||||
|  | Fhewq2Ddtg4AK6SxqH4rFQCmgOR/ci7jv9TXS9xEQxYliyN5aNymRTyXmwqBIzjNKR6QAA | ||||||
|  | AMEAxpme2upng9LS6Epa83d1gnWUilYPbpb1C8+1FgpnBv9zkjFE1vY0Vu4i9LcLGlCQ0x | ||||||
|  | PB1Z16TQlEluqiSuSA0eyaWSQBF9NyGsOCOZ63lpJs/2FRBfcbUvHhv8/g1fv/xvI+FnE+ | ||||||
|  | DoAhz8V3byU8HUZer7pQY3hSxisdYdsaromxC8DSSPFQoxpxwh7WuP4c3veWkdL13h4fSN | ||||||
|  | khGr3G1XGfsZOu6V6F1i7yMU6OcwBAxzPsHqZv66sT8lE6n4xjAAAAwQDzAaVaJqZ2ROoF | ||||||
|  | loltJZUtE7o+zpoDzjOJyGYaCYTU4dHPN1aeYBjw8QfmJhdmZfJp9AeJDB/W0wzoHi2ONI | ||||||
|  | chnQ1EdbCLk9pvA7rhfVdZaxPeHwniDp2iA/wZKTRG3hav9nEzS72uXuZprCsbBvGXeR0z | ||||||
|  | iuIx5odVXG8qyuI9lDY6B/IoLg7zd+V6iw9mqWYlLLsgHiAvg32LAT4j0KoTufOqpnxqTQ | ||||||
|  | P2EguTmxDWkfQmbEHdJvbD2tLQ90zMlwMAAADBAMk88wOA1i/TibH5gm/lAtKPcNKbrHfk | ||||||
|  | 7O9gdSZd2HL0fLjptpOplS89Y7muTElsRDRGiKq+7KV/sxQRNcITkxdTKu8CKnftFWHrLk | ||||||
|  | 9WHWVHXbu9h8ttsKeUr9i27ojxpe5I82of8k7fJTg1LxMnGzuDZfq1BGsQnOWrY7r1Yjcd | ||||||
|  | 8EtSrwOB+J/S4U+rR6kwUEFYeBkhE599P1EtHTCm8kWh368di9Q+Y/VIOa3qRx4hxuiCLI | ||||||
|  | qj4ZpdVMk2cCNcjwAAAAAB | ||||||
|  | -----END OPENSSH PRIVATE KEY----- | ||||||
|  | @ -0,0 +1 @@ | ||||||
|  | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/BiXkbL9oEbE3PJv1S2p12XK5BHW3qQT5Rf+CYG0ATYyMPIVM6+IXVyf3QNxpnvPXvbPBQJCs0qHeuPwZy2Gsbt35QnmlgrczFPiXXosCD2N+wrcOQPZGuLjQyUUP2yJRVSTLpp8zk2F8w3laGIB3Jk1hUcMUExemKxQYk/L40b5rXKkarLk5awBuicjRStMrchPRHZ2n715TG+zSvf8tB/UHRXKYPqai/Je5eiH3yGUzCY4zn+uEoqAFb4V8lpIj8Rw3EXmCYVwG0vg+44QIQ2gJnIhTlcmxwkynvZn97nug4NLlGJQ+sDCnIvMapycHfGkNlBz3fFtu/ORsxPpZbTNg/9noa3Zf8OpIwvE/FHNPqDctGltwxEgQxj5fE34x0fYnF08tejAUJJCZE3YsGgNabsS4pD+kRhI83eFZvgj3Q1AeTK0V9bRM7jujcc9Rz+V9Gb5zYEHN/l8PxEVlj0OlURf9ZlknNQK8xRh597jDXTfVQKCMO/nRaWH2bq0=  | ||||||
|  | @ -0,0 +1,595 @@ | ||||||
|  | import io | ||||||
|  | import os | ||||||
|  | import shutil | ||||||
|  | import tempfile | ||||||
|  | 
 | ||||||
|  | from docker import errors | ||||||
|  | from docker.utils.proxy import ProxyConfig | ||||||
|  | 
 | ||||||
|  | import pytest | ||||||
|  | import six | ||||||
|  | 
 | ||||||
|  | from .base import BaseAPIIntegrationTest, TEST_IMG | ||||||
|  | from ..helpers import random_name, requires_api_version, requires_experimental | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class BuildTest(BaseAPIIntegrationTest): | ||||||
|  |     def test_build_with_proxy(self): | ||||||
|  |         self.client._proxy_configs = ProxyConfig( | ||||||
|  |             ftp='a', http='b', https='c', no_proxy='d' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM busybox', | ||||||
|  |             'RUN env | grep "FTP_PROXY=a"', | ||||||
|  |             'RUN env | grep "ftp_proxy=a"', | ||||||
|  |             'RUN env | grep "HTTP_PROXY=b"', | ||||||
|  |             'RUN env | grep "http_proxy=b"', | ||||||
|  |             'RUN env | grep "HTTPS_PROXY=c"', | ||||||
|  |             'RUN env | grep "https_proxy=c"', | ||||||
|  |             'RUN env | grep "NO_PROXY=d"', | ||||||
|  |             'RUN env | grep "no_proxy=d"', | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         self.client.build(fileobj=script, decode=True) | ||||||
|  | 
 | ||||||
|  |     def test_build_with_proxy_and_buildargs(self): | ||||||
|  |         self.client._proxy_configs = ProxyConfig( | ||||||
|  |             ftp='a', http='b', https='c', no_proxy='d' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM busybox', | ||||||
|  |             'RUN env | grep "FTP_PROXY=XXX"', | ||||||
|  |             'RUN env | grep "ftp_proxy=xxx"', | ||||||
|  |             'RUN env | grep "HTTP_PROXY=b"', | ||||||
|  |             'RUN env | grep "http_proxy=b"', | ||||||
|  |             'RUN env | grep "HTTPS_PROXY=c"', | ||||||
|  |             'RUN env | grep "https_proxy=c"', | ||||||
|  |             'RUN env | grep "NO_PROXY=d"', | ||||||
|  |             'RUN env | grep "no_proxy=d"', | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         self.client.build( | ||||||
|  |             fileobj=script, | ||||||
|  |             decode=True, | ||||||
|  |             buildargs={'FTP_PROXY': 'XXX', 'ftp_proxy': 'xxx'} | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     def test_build_streaming(self): | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM busybox', | ||||||
|  |             'RUN mkdir -p /tmp/test', | ||||||
|  |             'EXPOSE 8080', | ||||||
|  |             'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' | ||||||
|  |             ' /tmp/silence.tar.gz' | ||||||
|  |         ]).encode('ascii')) | ||||||
|  |         stream = self.client.build(fileobj=script, decode=True) | ||||||
|  |         logs = [] | ||||||
|  |         for chunk in stream: | ||||||
|  |             logs.append(chunk) | ||||||
|  |         assert len(logs) > 0 | ||||||
|  | 
 | ||||||
|  |     def test_build_from_stringio(self): | ||||||
|  |         if six.PY3: | ||||||
|  |             return | ||||||
|  |         script = io.StringIO(six.text_type('\n').join([ | ||||||
|  |             'FROM busybox', | ||||||
|  |             'RUN mkdir -p /tmp/test', | ||||||
|  |             'EXPOSE 8080', | ||||||
|  |             'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' | ||||||
|  |             ' /tmp/silence.tar.gz' | ||||||
|  |         ])) | ||||||
|  |         stream = self.client.build(fileobj=script) | ||||||
|  |         logs = '' | ||||||
|  |         for chunk in stream: | ||||||
|  |             if six.PY3: | ||||||
|  |                 chunk = chunk.decode('utf-8') | ||||||
|  |             logs += chunk | ||||||
|  |         assert logs != '' | ||||||
|  | 
 | ||||||
|  |     def test_build_with_dockerignore(self): | ||||||
|  |         base_dir = tempfile.mkdtemp() | ||||||
|  |         self.addCleanup(shutil.rmtree, base_dir) | ||||||
|  | 
 | ||||||
|  |         with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f: | ||||||
|  |             f.write("\n".join([ | ||||||
|  |                 'FROM busybox', | ||||||
|  |                 'ADD . /test', | ||||||
|  |             ])) | ||||||
|  | 
 | ||||||
|  |         with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: | ||||||
|  |             f.write("\n".join([ | ||||||
|  |                 'ignored', | ||||||
|  |                 'Dockerfile', | ||||||
|  |                 '.dockerignore', | ||||||
|  |                 '!ignored/subdir/excepted-file', | ||||||
|  |                 '',  # empty line, | ||||||
|  |                 '#*',  # comment line | ||||||
|  |             ])) | ||||||
|  | 
 | ||||||
|  |         with open(os.path.join(base_dir, 'not-ignored'), 'w') as f: | ||||||
|  |             f.write("this file should not be ignored") | ||||||
|  | 
 | ||||||
|  |         with open(os.path.join(base_dir, '#file.txt'), 'w') as f: | ||||||
|  |             f.write('this file should not be ignored') | ||||||
|  | 
 | ||||||
|  |         subdir = os.path.join(base_dir, 'ignored', 'subdir') | ||||||
|  |         os.makedirs(subdir) | ||||||
|  |         with open(os.path.join(subdir, 'file'), 'w') as f: | ||||||
|  |             f.write("this file should be ignored") | ||||||
|  | 
 | ||||||
|  |         with open(os.path.join(subdir, 'excepted-file'), 'w') as f: | ||||||
|  |             f.write("this file should not be ignored") | ||||||
|  | 
 | ||||||
|  |         tag = 'docker-py-test-build-with-dockerignore' | ||||||
|  |         stream = self.client.build( | ||||||
|  |             path=base_dir, | ||||||
|  |             tag=tag, | ||||||
|  |         ) | ||||||
|  |         for chunk in stream: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         c = self.client.create_container(tag, ['find', '/test', '-type', 'f']) | ||||||
|  |         self.client.start(c) | ||||||
|  |         self.client.wait(c) | ||||||
|  |         logs = self.client.logs(c) | ||||||
|  | 
 | ||||||
|  |         if six.PY3: | ||||||
|  |             logs = logs.decode('utf-8') | ||||||
|  | 
 | ||||||
|  |         assert sorted(list(filter(None, logs.split('\n')))) == sorted([ | ||||||
|  |             '/test/#file.txt', | ||||||
|  |             '/test/ignored/subdir/excepted-file', | ||||||
|  |             '/test/not-ignored' | ||||||
|  |         ]) | ||||||
|  | 
 | ||||||
|  |     def test_build_with_buildargs(self): | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM scratch', | ||||||
|  |             'ARG test', | ||||||
|  |             'USER $test' | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, tag='buildargs', buildargs={'test': 'OK'} | ||||||
|  |         ) | ||||||
|  |         self.tmp_imgs.append('buildargs') | ||||||
|  |         for chunk in stream: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         info = self.client.inspect_image('buildargs') | ||||||
|  |         assert info['Config']['User'] == 'OK' | ||||||
|  | 
 | ||||||
|  |     @requires_api_version('1.22') | ||||||
|  |     def test_build_shmsize(self): | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM scratch', | ||||||
|  |             'CMD sh -c "echo \'Hello, World!\'"', | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         tag = 'shmsize' | ||||||
|  |         shmsize = 134217728 | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, tag=tag, shmsize=shmsize | ||||||
|  |         ) | ||||||
|  |         self.tmp_imgs.append(tag) | ||||||
|  |         for chunk in stream: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         # There is currently no way to get the shmsize | ||||||
|  |         # that was used to build the image | ||||||
|  | 
 | ||||||
|  |     @requires_api_version('1.24') | ||||||
|  |     def test_build_isolation(self): | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM scratch', | ||||||
|  |             'CMD sh -c "echo \'Deaf To All But The Song\'' | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, tag='isolation', | ||||||
|  |             isolation='default' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         for chunk in stream: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |     @requires_api_version('1.23') | ||||||
|  |     def test_build_labels(self): | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM scratch', | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         labels = {'test': 'OK'} | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, tag='labels', labels=labels | ||||||
|  |         ) | ||||||
|  |         self.tmp_imgs.append('labels') | ||||||
|  |         for chunk in stream: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         info = self.client.inspect_image('labels') | ||||||
|  |         assert info['Config']['Labels'] == labels | ||||||
|  | 
 | ||||||
|  |     @requires_api_version('1.25') | ||||||
|  |     def test_build_with_cache_from(self): | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM busybox', | ||||||
|  |             'ENV FOO=bar', | ||||||
|  |             'RUN touch baz', | ||||||
|  |             'RUN touch bax', | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build(fileobj=script, tag='build1') | ||||||
|  |         self.tmp_imgs.append('build1') | ||||||
|  |         for chunk in stream: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, tag='build2', cache_from=['build1'], | ||||||
|  |             decode=True | ||||||
|  |         ) | ||||||
|  |         self.tmp_imgs.append('build2') | ||||||
|  |         counter = 0 | ||||||
|  |         for chunk in stream: | ||||||
|  |             if 'Using cache' in chunk.get('stream', ''): | ||||||
|  |                 counter += 1 | ||||||
|  |         assert counter == 3 | ||||||
|  |         self.client.remove_image('build2') | ||||||
|  | 
 | ||||||
|  |         counter = 0 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, tag='build2', cache_from=['nosuchtag'], | ||||||
|  |             decode=True | ||||||
|  |         ) | ||||||
|  |         for chunk in stream: | ||||||
|  |             if 'Using cache' in chunk.get('stream', ''): | ||||||
|  |                 counter += 1 | ||||||
|  |         assert counter == 0 | ||||||
|  | 
 | ||||||
|  |     @requires_api_version('1.29') | ||||||
|  |     def test_build_container_with_target(self): | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM busybox as first', | ||||||
|  |             'RUN mkdir -p /tmp/test', | ||||||
|  |             'RUN touch /tmp/silence.tar.gz', | ||||||
|  |             'FROM alpine:latest', | ||||||
|  |             'WORKDIR /root/' | ||||||
|  |             'COPY --from=first /tmp/silence.tar.gz .', | ||||||
|  |             'ONBUILD RUN echo "This should not be in the final image"' | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, target='first', tag='build1' | ||||||
|  |         ) | ||||||
|  |         self.tmp_imgs.append('build1') | ||||||
|  |         for chunk in stream: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         info = self.client.inspect_image('build1') | ||||||
|  |         assert not info['Config']['OnBuild'] | ||||||
|  | 
 | ||||||
|  |     @requires_api_version('1.25') | ||||||
|  |     def test_build_with_network_mode(self): | ||||||
|  |         # Set up pingable endpoint on custom network | ||||||
|  |         network = self.client.create_network(random_name())['Id'] | ||||||
|  |         self.tmp_networks.append(network) | ||||||
|  |         container = self.client.create_container(TEST_IMG, 'top') | ||||||
|  |         self.tmp_containers.append(container) | ||||||
|  |         self.client.start(container) | ||||||
|  |         self.client.connect_container_to_network( | ||||||
|  |             container, network, aliases=['pingtarget.docker'] | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM busybox', | ||||||
|  |             'RUN ping -c1 pingtarget.docker' | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, network_mode=network, | ||||||
|  |             tag='dockerpytest_customnetbuild' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         self.tmp_imgs.append('dockerpytest_customnetbuild') | ||||||
|  |         for chunk in stream: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         assert self.client.inspect_image('dockerpytest_customnetbuild') | ||||||
|  | 
 | ||||||
|  |         script.seek(0) | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, network_mode='none', | ||||||
|  |             tag='dockerpytest_nonebuild', nocache=True, decode=True | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         self.tmp_imgs.append('dockerpytest_nonebuild') | ||||||
|  |         logs = [chunk for chunk in stream] | ||||||
|  |         assert 'errorDetail' in logs[-1] | ||||||
|  |         assert logs[-1]['errorDetail']['code'] == 1 | ||||||
|  | 
 | ||||||
|  |         with pytest.raises(errors.NotFound): | ||||||
|  |             self.client.inspect_image('dockerpytest_nonebuild') | ||||||
|  | 
 | ||||||
|  |     @requires_api_version('1.27') | ||||||
|  |     def test_build_with_extra_hosts(self): | ||||||
|  |         img_name = 'dockerpytest_extrahost_build' | ||||||
|  |         self.tmp_imgs.append(img_name) | ||||||
|  | 
 | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM busybox', | ||||||
|  |             'RUN ping -c1 hello.world.test', | ||||||
|  |             'RUN ping -c1 extrahost.local.test', | ||||||
|  |             'RUN cp /etc/hosts /hosts-file' | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, tag=img_name, | ||||||
|  |             extra_hosts={ | ||||||
|  |                 'extrahost.local.test': '127.0.0.1', | ||||||
|  |                 'hello.world.test': '127.0.0.1', | ||||||
|  |             }, decode=True | ||||||
|  |         ) | ||||||
|  |         for chunk in stream: | ||||||
|  |             if 'errorDetail' in chunk: | ||||||
|  |                 pytest.fail(chunk) | ||||||
|  | 
 | ||||||
|  |         assert self.client.inspect_image(img_name) | ||||||
|  |         ctnr = self.run_container(img_name, 'cat /hosts-file') | ||||||
|  |         logs = self.client.logs(ctnr) | ||||||
|  |         if six.PY3: | ||||||
|  |             logs = logs.decode('utf-8') | ||||||
|  |         assert '127.0.0.1\textrahost.local.test' in logs | ||||||
|  |         assert '127.0.0.1\thello.world.test' in logs | ||||||
|  | 
 | ||||||
|  |     @requires_experimental(until=None) | ||||||
|  |     @requires_api_version('1.25') | ||||||
|  |     def test_build_squash(self): | ||||||
|  |         script = io.BytesIO('\n'.join([ | ||||||
|  |             'FROM busybox', | ||||||
|  |             'RUN echo blah > /file_1', | ||||||
|  |             'RUN echo blahblah > /file_2', | ||||||
|  |             'RUN echo blahblahblah > /file_3' | ||||||
|  |         ]).encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         def build_squashed(squash): | ||||||
|  |             tag = 'squash' if squash else 'nosquash' | ||||||
|  |             stream = self.client.build( | ||||||
|  |                 fileobj=script, tag=tag, squash=squash | ||||||
|  |             ) | ||||||
|  |             self.tmp_imgs.append(tag) | ||||||
|  |             for chunk in stream: | ||||||
|  |                 pass | ||||||
|  | 
 | ||||||
|  |             return self.client.inspect_image(tag) | ||||||
|  | 
 | ||||||
|  |         non_squashed = build_squashed(False) | ||||||
|  |         squashed = build_squashed(True) | ||||||
|  |         assert len(non_squashed['RootFS']['Layers']) == 4 | ||||||
|  |         assert len(squashed['RootFS']['Layers']) == 2 | ||||||
|  | 
 | ||||||
|  |     def test_build_stderr_data(self): | ||||||
|  |         control_chars = ['\x1b[91m', '\x1b[0m'] | ||||||
|  |         snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)' | ||||||
|  |         script = io.BytesIO(b'\n'.join([ | ||||||
|  |             b'FROM busybox', | ||||||
|  |             'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8') | ||||||
|  |         ])) | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             fileobj=script, decode=True, nocache=True | ||||||
|  |         ) | ||||||
|  |         lines = [] | ||||||
|  |         for chunk in stream: | ||||||
|  |             lines.append(chunk.get('stream')) | ||||||
|  |         expected = '{0}{2}\n{1}'.format( | ||||||
|  |             control_chars[0], control_chars[1], snippet | ||||||
|  |         ) | ||||||
|  |         assert any([line == expected for line in lines]) | ||||||
|  | 
 | ||||||
|  |     def test_build_gzip_encoding(self): | ||||||
|  |         base_dir = tempfile.mkdtemp() | ||||||
|  |         self.addCleanup(shutil.rmtree, base_dir) | ||||||
|  | 
 | ||||||
|  |         with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f: | ||||||
|  |             f.write("\n".join([ | ||||||
|  |                 'FROM busybox', | ||||||
|  |                 'ADD . /test', | ||||||
|  |             ])) | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             path=base_dir, decode=True, nocache=True, | ||||||
|  |             gzip=True | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         lines = [] | ||||||
|  |         for chunk in stream: | ||||||
|  |             lines.append(chunk) | ||||||
|  | 
 | ||||||
|  |         assert 'Successfully built' in lines[-1]['stream'] | ||||||
|  | 
 | ||||||
|  |     def test_build_with_dockerfile_empty_lines(self): | ||||||
|  |         base_dir = tempfile.mkdtemp() | ||||||
|  |         self.addCleanup(shutil.rmtree, base_dir) | ||||||
|  |         with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f: | ||||||
|  |             f.write('FROM busybox\n') | ||||||
|  |         with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: | ||||||
|  |             f.write('\n'.join([ | ||||||
|  |                 '   ', | ||||||
|  |                 '', | ||||||
|  |                 '\t\t', | ||||||
|  |                 '\t     ', | ||||||
|  |             ])) | ||||||
|  | 
 | ||||||
|  |         stream = self.client.build( | ||||||
|  |             path=base_dir, decode=True, nocache=True | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         lines = [] | ||||||
|  |         for chunk in stream: | ||||||
|  |             lines.append(chunk) | ||||||
|  |         assert 'Successfully built' in lines[-1]['stream'] | ||||||
|  | 
 | ||||||
|  |     def test_build_gzip_custom_encoding(self): | ||||||
|  |         with pytest.raises(errors.DockerException): | ||||||
|  |             self.client.build(path='.', gzip=True, encoding='text/html') | ||||||
|  | 
 | ||||||
|  |     @requires_api_version('1.32') | ||||||
|  |     @requires_experimental(until=None) | ||||||
|  |     def test_build_invalid_platform(self): | ||||||
|  |         script = io.BytesIO('FROM busybox\n'.encode('ascii')) | ||||||
|  | 
 | ||||||
|  |         with pytest.raises(errors.APIError) as excinfo: | ||||||
|  |             stream = self.client.build(fileobj=script, platform='foobar') | ||||||
|  |             for _ in stream: | ||||||
|  |                 pass | ||||||
|  | 
 | ||||||
|  |         # Some API versions incorrectly returns 500 status; assert 4xx or 5xx | ||||||
|  |         assert excinfo.value.is_error() | ||||||
|  |         assert 'unknown operating system' in excinfo.exconly() \ | ||||||
|  |                or 'invalid platform' in excinfo.exconly() | ||||||
|  | 
 | ||||||
|  |     def test_build_out_of_context_dockerfile(self): | ||||||
|  |         base_dir = tempfile.mkdtemp() | ||||||
|  |         self.addCleanup(shutil.rmtree, base_dir) | ||||||
|  |         with open(os.path.join(base_dir, 'file.txt'), 'w') as f: | ||||||
|  |             f.write('hello world') | ||||||
|  |         with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: | ||||||
|  |             f.write('.dockerignore\n') | ||||||
|  |         df_dir = tempfile.mkdtemp() | ||||||
|  |         self.addCleanup(shutil.rmtree, df_dir) | ||||||
|  |         df_name = os.path.join(df_dir, 'Dockerfile') | ||||||
|  |         with open(df_name, 'wb') as df: | ||||||
|  |             df.write(('\n'.join([ | ||||||
|  |                 'FROM busybox', | ||||||
|  |                 'COPY . /src', | ||||||
|  |                 'WORKDIR /src', | ||||||
|  |             ])).encode('utf-8')) | ||||||
|  |             df.flush() | ||||||
|  |         img_name = random_name() | ||||||
|  |         self.tmp_imgs.append(img_name) | ||||||
|  |         stream = self.client.build( | ||||||
|  |             path=base_dir, dockerfile=df_name, tag=img_name, | ||||||
|  |             decode=True | ||||||
|  |         ) | ||||||
|  |         lines = [] | ||||||
|  |         for chunk in stream: | ||||||
|  |             lines.append(chunk) | ||||||
|  |         assert 'Successfully tagged' in lines[-1]['stream'] | ||||||
|  | 
 | ||||||
|  |         ctnr = self.client.create_container(img_name, 'ls -a') | ||||||
|  |         self.tmp_containers.append(ctnr) | ||||||
|  |         self.client.start(ctnr) | ||||||
|  |         lsdata = self.client.logs(ctnr).strip().split(b'\n') | ||||||
|  |         assert len(lsdata) == 3 | ||||||
|  |         assert sorted([b'.', b'..', b'file.txt']) == sorted(lsdata) | ||||||
|  | 
 | ||||||
|  |     def test_build_in_context_dockerfile(self): | ||||||
|  |         base_dir = tempfile.mkdtemp() | ||||||
|  |         self.addCleanup(shutil.rmtree, base_dir) | ||||||
|  |         with open(os.path.join(base_dir, 'file.txt'), 'w') as f: | ||||||
|  |             f.write('hello world') | ||||||
|  |         with open(os.path.join(base_dir, 'custom.dockerfile'), 'w') as df: | ||||||
|  |             df.write('\n'.join([ | ||||||
|  |                 'FROM busybox', | ||||||
|  |                 'COPY . /src', | ||||||
|  |                 'WORKDIR /src', | ||||||
|  |             ])) | ||||||
|  |         img_name = random_name() | ||||||
|  |         self.tmp_imgs.append(img_name) | ||||||
|  |         stream = self.client.build( | ||||||
|  |             path=base_dir, dockerfile='custom.dockerfile', tag=img_name, | ||||||
|  |             decode=True | ||||||
|  |         ) | ||||||
|  |         lines = [] | ||||||
|  |         for chunk in stream: | ||||||
|  |             lines.append(chunk) | ||||||
|  |         assert 'Successfully tagged' in lines[-1]['stream'] | ||||||
|  | 
 | ||||||
|  |         ctnr = self.client.create_container(img_name, 'ls -a') | ||||||
|  |         self.tmp_containers.append(ctnr) | ||||||
|  |         self.client.start(ctnr) | ||||||
|  |         lsdata = self.client.logs(ctnr).strip().split(b'\n') | ||||||
|  |         assert len(lsdata) == 4 | ||||||
|  |         assert sorted( | ||||||
|  |             [b'.', b'..', b'file.txt', b'custom.dockerfile'] | ||||||
|  |         ) == sorted(lsdata) | ||||||
|  | 
 | ||||||
|  |     def test_build_in_context_nested_dockerfile(self): | ||||||
|  |         base_dir = tempfile.mkdtemp() | ||||||
|  |         self.addCleanup(shutil.rmtree, base_dir) | ||||||
|  |         with open(os.path.join(base_dir, 'file.txt'), 'w') as f: | ||||||
|  |             f.write('hello world') | ||||||
|  |         subdir = os.path.join(base_dir, 'hello', 'world') | ||||||
|  |         os.makedirs(subdir) | ||||||
|  |         with open(os.path.join(subdir, 'custom.dockerfile'), 'w') as df: | ||||||
|  |             df.write('\n'.join([ | ||||||
|  |                 'FROM busybox', | ||||||
|  |                 'COPY . /src', | ||||||
|  |                 'WORKDIR /src', | ||||||
|  |             ])) | ||||||
|  |         img_name = random_name() | ||||||
|  |         self.tmp_imgs.append(img_name) | ||||||
|  |         stream = self.client.build( | ||||||
|  |             path=base_dir, dockerfile='hello/world/custom.dockerfile', | ||||||
|  |             tag=img_name, decode=True | ||||||
|  |         ) | ||||||
|  |         lines = [] | ||||||
|  |         for chunk in stream: | ||||||
|  |             lines.append(chunk) | ||||||
|  |         assert 'Successfully tagged' in lines[-1]['stream'] | ||||||
|  | 
 | ||||||
|  |         ctnr = self.client.create_container(img_name, 'ls -a') | ||||||
|  |         self.tmp_containers.append(ctnr) | ||||||
|  |         self.client.start(ctnr) | ||||||
|  |         lsdata = self.client.logs(ctnr).strip().split(b'\n') | ||||||
|  |         assert len(lsdata) == 4 | ||||||
|  |         assert sorted( | ||||||
|  |             [b'.', b'..', b'file.txt', b'hello'] | ||||||
|  |         ) == sorted(lsdata) | ||||||
|  | 
 | ||||||
|  |     def test_build_in_context_abs_dockerfile(self): | ||||||
|  |         base_dir = tempfile.mkdtemp() | ||||||
|  |         self.addCleanup(shutil.rmtree, base_dir) | ||||||
|  |         abs_dockerfile_path = os.path.join(base_dir, 'custom.dockerfile') | ||||||
|  |         with open(os.path.join(base_dir, 'file.txt'), 'w') as f: | ||||||
|  |             f.write('hello world') | ||||||
|  |         with open(abs_dockerfile_path, 'w') as df: | ||||||
|  |             df.write('\n'.join([ | ||||||
|  |                 'FROM busybox', | ||||||
|  |                 'COPY . /src', | ||||||
|  |                 'WORKDIR /src', | ||||||
|  |             ])) | ||||||
|  |         img_name = random_name() | ||||||
|  |         self.tmp_imgs.append(img_name) | ||||||
|  |         stream = self.client.build( | ||||||
|  |             path=base_dir, dockerfile=abs_dockerfile_path, tag=img_name, | ||||||
|  |             decode=True | ||||||
|  |         ) | ||||||
|  |         lines = [] | ||||||
|  |         for chunk in stream: | ||||||
|  |             lines.append(chunk) | ||||||
|  |         assert 'Successfully tagged' in lines[-1]['stream'] | ||||||
|  | 
 | ||||||
|  |         ctnr = self.client.create_container(img_name, 'ls -a') | ||||||
|  |         self.tmp_containers.append(ctnr) | ||||||
|  |         self.client.start(ctnr) | ||||||
|  |         lsdata = self.client.logs(ctnr).strip().split(b'\n') | ||||||
|  |         assert len(lsdata) == 4 | ||||||
|  |         assert sorted( | ||||||
|  |             [b'.', b'..', b'file.txt', b'custom.dockerfile'] | ||||||
|  |         ) == sorted(lsdata) | ||||||
|  | 
 | ||||||
|  |     @requires_api_version('1.31') | ||||||
|  |     @pytest.mark.xfail( | ||||||
|  |         True, | ||||||
|  |         reason='Currently fails on 18.09: ' | ||||||
|  |                'https://github.com/moby/moby/issues/37920' | ||||||
|  |     ) | ||||||
|  |     def test_prune_builds(self): | ||||||
|  |         prune_result = self.client.prune_builds() | ||||||
|  |         assert 'SpaceReclaimed' in prune_result | ||||||
|  |         assert isinstance(prune_result['SpaceReclaimed'], int) | ||||||
|  | @ -0,0 +1,127 @@ | ||||||
|  | import os | ||||||
|  | import shutil | ||||||
|  | import unittest | ||||||
|  | 
 | ||||||
|  | import docker | ||||||
|  | from .. import helpers | ||||||
|  | from docker.utils import kwargs_from_env | ||||||
|  | 
 | ||||||
|  | TEST_IMG = 'alpine:3.10' | ||||||
|  | TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class BaseIntegrationTest(unittest.TestCase): | ||||||
|  |     """ | ||||||
|  |     A base class for integration test cases. It cleans up the Docker server | ||||||
|  |     after itself. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def setUp(self): | ||||||
|  |         self.tmp_imgs = [] | ||||||
|  |         self.tmp_containers = [] | ||||||
|  |         self.tmp_folders = [] | ||||||
|  |         self.tmp_volumes = [] | ||||||
|  |         self.tmp_networks = [] | ||||||
|  |         self.tmp_plugins = [] | ||||||
|  |         self.tmp_secrets = [] | ||||||
|  |         self.tmp_configs = [] | ||||||
|  | 
 | ||||||
|  |     def tearDown(self): | ||||||
|  |         client = docker.from_env(version=TEST_API_VERSION, use_ssh_client=True) | ||||||
|  |         try: | ||||||
|  |             for img in self.tmp_imgs: | ||||||
|  |                 try: | ||||||
|  |                     client.api.remove_image(img) | ||||||
|  |                 except docker.errors.APIError: | ||||||
|  |                     pass | ||||||
|  |             for container in self.tmp_containers: | ||||||
|  |                 try: | ||||||
|  |                     client.api.remove_container(container, force=True, v=True) | ||||||
|  |                 except docker.errors.APIError: | ||||||
|  |                     pass | ||||||
|  |             for network in self.tmp_networks: | ||||||
|  |                 try: | ||||||
|  |                     client.api.remove_network(network) | ||||||
|  |                 except docker.errors.APIError: | ||||||
|  |                     pass | ||||||
|  |             for volume in self.tmp_volumes: | ||||||
|  |                 try: | ||||||
|  |                     client.api.remove_volume(volume) | ||||||
|  |                 except docker.errors.APIError: | ||||||
|  |                     pass | ||||||
|  | 
 | ||||||
|  |             for secret in self.tmp_secrets: | ||||||
|  |                 try: | ||||||
|  |                     client.api.remove_secret(secret) | ||||||
|  |                 except docker.errors.APIError: | ||||||
|  |                     pass | ||||||
|  | 
 | ||||||
|  |             for config in self.tmp_configs: | ||||||
|  |                 try: | ||||||
|  |                     client.api.remove_config(config) | ||||||
|  |                 except docker.errors.APIError: | ||||||
|  |                     pass | ||||||
|  | 
 | ||||||
|  |             for folder in self.tmp_folders: | ||||||
|  |                 shutil.rmtree(folder) | ||||||
|  |         finally: | ||||||
|  |             client.close() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class BaseAPIIntegrationTest(BaseIntegrationTest): | ||||||
|  |     """ | ||||||
|  |     A test case for `APIClient` integration tests. It sets up an `APIClient` | ||||||
|  |     as `self.client`. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def setUp(self): | ||||||
|  |         super(BaseAPIIntegrationTest, self).setUp() | ||||||
|  |         self.client = self.get_client_instance() | ||||||
|  | 
 | ||||||
|  |     def tearDown(self): | ||||||
|  |         super(BaseAPIIntegrationTest, self).tearDown() | ||||||
|  |         self.client.close() | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def get_client_instance(): | ||||||
|  |         return docker.APIClient( | ||||||
|  |             version=TEST_API_VERSION, timeout=60, use_ssh_client=True, **kwargs_from_env() | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def _init_swarm(client, **kwargs): | ||||||
|  |         return client.init_swarm( | ||||||
|  |             '127.0.0.1', listen_addr=helpers.swarm_listen_addr(), **kwargs | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     def run_container(self, *args, **kwargs): | ||||||
|  |         container = self.client.create_container(*args, **kwargs) | ||||||
|  |         self.tmp_containers.append(container) | ||||||
|  |         self.client.start(container) | ||||||
|  |         exitcode = self.client.wait(container)['StatusCode'] | ||||||
|  | 
 | ||||||
|  |         if exitcode != 0: | ||||||
|  |             output = self.client.logs(container) | ||||||
|  |             raise Exception( | ||||||
|  |                 "Container exited with code {}:\n{}" | ||||||
|  |                 .format(exitcode, output)) | ||||||
|  | 
 | ||||||
|  |         return container | ||||||
|  | 
 | ||||||
|  |     def create_and_start(self, image=TEST_IMG, command='top', **kwargs): | ||||||
|  |         container = self.client.create_container( | ||||||
|  |             image=image, command=command, **kwargs) | ||||||
|  |         self.tmp_containers.append(container) | ||||||
|  |         self.client.start(container) | ||||||
|  |         return container | ||||||
|  | 
 | ||||||
|  |     def execute(self, container, cmd, exit_code=0, **kwargs): | ||||||
|  |         exc = self.client.exec_create(container, cmd, **kwargs) | ||||||
|  |         output = self.client.exec_start(exc) | ||||||
|  |         actual_exit_code = self.client.exec_inspect(exc)['ExitCode'] | ||||||
|  |         msg = "Expected `{}` to exit with code {} but returned {}:\n{}".format( | ||||||
|  |             " ".join(cmd), exit_code, actual_exit_code, output) | ||||||
|  |         assert actual_exit_code == exit_code, msg | ||||||
|  | 
 | ||||||
|  |     def init_swarm(self, **kwargs): | ||||||
|  |         return self._init_swarm(self.client, **kwargs) | ||||||
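The base class above builds every client with use_ssh_client=True, so each test case under tests/ssh/ talks to the daemon through the SSH transport introduced in this commit. A hypothetical test module sketch (the relative import assumes the base class lives at tests/ssh/base.py, matching the tests/ssh/${file} path used by the Makefile targets):

    from .base import BaseAPIIntegrationTest, TEST_IMG


    class ExampleSSHTest(BaseAPIIntegrationTest):
        def test_echo_over_ssh(self):
            # self.client is an APIClient created with use_ssh_client=True.
            ctnr = self.run_container(TEST_IMG, ['echo', 'hello over ssh'])
            assert b'hello over ssh' in self.client.logs(ctnr)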