mirror of https://github.com/docker/docker-py.git

Merge pull request #210 from aanand/universally-compatible-chunking

Universally-compatible reading of chunked streams

commit 55b93d676b
@@ -220,9 +220,23 @@ class Client(requests.Session):
 
     def _stream_helper(self, response):
         """Generator for data coming from a chunked-encoded HTTP response."""
-        for line in response.iter_lines(chunk_size=32):
-            if line:
-                yield line
+        socket_fp = self._get_raw_response_socket(response)
+        socket_fp.setblocking(1)
+        socket = socket_fp.makefile()
+        while True:
+            # Because Docker introduced newlines at the end of chunks in v0.9,
+            # and only on some API endpoints, we have to cater for both cases.
+            size_line = socket.readline()
+            if size_line == '\r\n':
+                size_line = socket.readline()
+
+            size = int(size_line, 16)
+            if size <= 0:
+                break
+            data = socket.readline()
+            if not data:
+                break
+            yield data
 
     def _multiplexed_buffer_helper(self, response):
         """A generator of multiplexed data blocks read from a buffered
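The rewritten _stream_helper parses the HTTP chunked transfer framing by hand: each chunk begins with a hexadecimal size line, which on some Docker >= 0.9 endpoints is preceded by a stray '\r\n', and the payload is then read up to its trailing newline. The sketch below is not part of the commit; it applies the same framing logic to an in-memory stream so the newline handling can be checked in isolation (the read_chunks name and the sample payload are invented for illustration).

import io

def read_chunks(fp):
    # Same framing logic as the new _stream_helper, applied to any file-like
    # object: read a hexadecimal size line, skipping a stray '\r\n' if one
    # precedes it; stop on a zero-size chunk, otherwise yield the payload line.
    while True:
        size_line = fp.readline()
        if size_line == '\r\n':
            size_line = fp.readline()
        size = int(size_line, 16)
        if size <= 0:
            break
        data = fp.readline()
        if not data:
            break
        yield data

# One 0x1c-byte chunk carrying a JSON status line, its chunk terminator,
# then the zero-size chunk that ends the stream.
stream = io.StringIO('1c\r\n{"status": "Pulling image"}\n\r\n0\r\n')
print(list(read_chunks(stream)))  # ['{"status": "Pulling image"}\n']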
@@ -14,6 +14,7 @@
 
 import time
+import base64
 import json
 import io
 import os
 import signal
@@ -674,10 +675,8 @@ class TestPullStream(BaseTestCase):
         self.assertIn('Images', info)
         img_count = info['Images']
         stream = self.client.pull('joffrey/test001', stream=True)
-        res = u''
         for chunk in stream:
-            res += chunk
-        self.assertEqual(type(res), six.text_type)
+            json.loads(chunk)  # ensure chunk is a single, valid JSON blob
         self.assertEqual(img_count + 3, self.client.info()['Images'])
         img_info = self.client.inspect_image('joffrey/test001')
         self.assertIn('id', img_info)
@@ -770,6 +769,7 @@ class TestBuildStream(BaseTestCase):
         stream = self.client.build(fileobj=script, stream=True)
         logs = ''
         for chunk in stream:
+            json.loads(chunk)  # ensure chunk is a single, valid JSON blob
             logs += chunk
         self.assertNotEqual(logs, '')
 
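The test changes lean on the guarantee the new helper provides: every chunk it yields is one complete JSON document, so a caller can decode chunks as they arrive instead of concatenating the whole response first. A minimal consumer sketch under that assumption follows; the image name is arbitrary, and docker.Client is the client class docker-py exposed at the time of this commit.

import json

import docker

client = docker.Client()  # era-appropriate docker-py client; image below is illustrative
for chunk in client.pull('busybox', stream=True):
    event = json.loads(chunk)           # one valid JSON blob per chunk
    print(event.get('status', event))   # e.g. pull progress / status messages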