mirror of https://github.com/docker/docker-py.git
			Universally-compatible reading of chunked streams
Docker introduced newlines in stream output in version 0.9 (https://github.com/dotcloud/docker/pull/4276), but not on all endpoints: POST /images/create, for example, does not include them. This reverts to the old, less pleasant implementation of _stream_helper(), with a manual check for newlines, fixing the problem described in #176 (and previously fixed in #184) without the accompanying regression. It should work against Docker 0.8, 0.9 and 0.10, both when building and when pulling.
parent 73434476b3
commit 073fdaf671
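The new _stream_helper parses the chunked HTTP stream by hand: it reads a hexadecimal size line, then one line of chunk data, and skips the bare '\r\n' that Docker 0.9+ leaves between chunks on endpoints that append newlines. The sketch below mirrors that loop over an in-memory stream; the wire bytes are simplified assumptions (one JSON status object followed by the terminating zero-size chunk), not output captured from Docker.

    import io
    import json

    def iter_chunks(fp):
        # Mirrors the loop added in the diff below: read a hexadecimal size
        # line, skip a bare '\r\n' if one was left over from the previous
        # chunk, stop on a zero-size chunk, otherwise read and yield one
        # line of chunk data.
        while True:
            size_line = fp.readline()
            if size_line == '\r\n':
                size_line = fp.readline()
            size = int(size_line, 16)  # int() tolerates the trailing '\r\n'
            if size <= 0:
                break
            data = fp.readline()
            if not data:
                break
            yield data

    # Assumed, simplified wire bytes. In both cases the chunk data is followed
    # by the usual chunked-encoding '\r\n'; on Docker >= 0.9 the JSON itself
    # also ends with '\n', which is the case the size-line check has to skip.
    docker_08_style = io.StringIO('10\r\n{"status": "ok"}\r\n0\r\n')
    docker_09_style = io.StringIO('11\r\n{"status": "ok"}\n\r\n0\r\n')
    for stream in (docker_08_style, docker_09_style):
        for chunk in iter_chunks(stream):
            assert json.loads(chunk) == {"status": "ok"}

The chunk size is only used to detect the terminating zero-size chunk; the data itself is delimited by readline() rather than an exact-size read, which is why the optional trailing newline has to be handled explicitly.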
					
@@ -209,9 +209,23 @@ class Client(requests.Session):
 
     def _stream_helper(self, response):
         """Generator for data coming from a chunked-encoded HTTP response."""
-        for line in response.iter_lines(chunk_size=32):
-            if line:
-                yield line
+        socket_fp = self._get_raw_response_socket(response)
+        socket_fp.setblocking(1)
+        socket = socket_fp.makefile()
+        while True:
+            # Because Docker introduced newlines at the end of chunks in v0.9,
+            # and only on some API endpoints, we have to cater for both cases.
+            size_line = socket.readline()
+            if size_line == '\r\n':
+                size_line = socket.readline()
+
+            size = int(size_line, 16)
+            if size <= 0:
+                break
+            data = socket.readline()
+            if not data:
+                break
+            yield data
 
     def _multiplexed_buffer_helper(self, response):
         """A generator of multiplexed data blocks read from a buffered
@@ -14,6 +14,7 @@
 
 import time
 import base64
+import json
 import io
 import os
 import signal
@@ -666,10 +667,8 @@ class TestPullStream(BaseTestCase):
         self.assertIn('Images', info)
         img_count = info['Images']
         stream = self.client.pull('joffrey/test001', stream=True)
-        res = u''
         for chunk in stream:
-            res += chunk
-        self.assertEqual(type(res), six.text_type)
+            json.loads(chunk)  # ensure chunk is a single, valid JSON blob
         self.assertEqual(img_count + 3, self.client.info()['Images'])
         img_info = self.client.inspect_image('joffrey/test001')
         self.assertIn('id', img_info)
@@ -762,6 +761,7 @@ class TestBuildStream(BaseTestCase):
         stream = self.client.build(fileobj=script, stream=True)
         logs = ''
         for chunk in stream:
+            json.loads(chunk)  # ensure chunk is a single, valid JSON blob
             logs += chunk
         self.assertNotEqual(logs, '')
 
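After this change, each chunk yielded by a streaming pull or build is expected to be one complete JSON document, which is what the updated tests assert. A usage sketch under that assumption, using the same pull(..., stream=True) call as the test above (the daemon URL and image name are placeholders):

    import json

    import docker

    # Placeholder daemon URL and image name; adjust for your environment.
    client = docker.Client(base_url='unix://var/run/docker.sock')
    for chunk in client.pull('busybox', stream=True):
        status = json.loads(chunk)           # each chunk is one JSON object
        print(status.get('status', status))  # e.g. pull progress messages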