diff --git a/docker/client.py b/docker/client.py
index d47dda70..95dea5b4 100644
--- a/docker/client.py
+++ b/docker/client.py
@@ -220,9 +220,23 @@ class Client(requests.Session):
 
     def _stream_helper(self, response):
         """Generator for data coming from a chunked-encoded HTTP response."""
-        for line in response.iter_lines(chunk_size=32):
-            if line:
-                yield line
+        socket_fp = self._get_raw_response_socket(response)
+        socket_fp.setblocking(1)
+        socket = socket_fp.makefile()
+        while True:
+            # Because Docker introduced newlines at the end of chunks in v0.9,
+            # and only on some API endpoints, we have to cater for both cases.
+            size_line = socket.readline()
+            if size_line == '\r\n':
+                size_line = socket.readline()
+
+            size = int(size_line, 16)
+            if size <= 0:
+                break
+            data = socket.readline()
+            if not data:
+                break
+            yield data
 
     def _multiplexed_buffer_helper(self, response):
         """A generator of multiplexed data blocks read from a buffered
diff --git a/tests/integration_test.py b/tests/integration_test.py
index 6761725e..bd03642a 100644
--- a/tests/integration_test.py
+++ b/tests/integration_test.py
@@ -14,6 +14,7 @@ import time
 
 import base64
+import json
 import io
 import os
 import signal
 
@@ -674,10 +675,8 @@ class TestPullStream(BaseTestCase):
         self.assertIn('Images', info)
         img_count = info['Images']
         stream = self.client.pull('joffrey/test001', stream=True)
-        res = u''
         for chunk in stream:
-            res += chunk
-            self.assertEqual(type(res), six.text_type)
+            json.loads(chunk)  # ensure chunk is a single, valid JSON blob
         self.assertEqual(img_count + 3, self.client.info()['Images'])
         img_info = self.client.inspect_image('joffrey/test001')
         self.assertIn('id', img_info)
@@ -770,6 +769,7 @@ class TestBuildStream(BaseTestCase):
         stream = self.client.build(fileobj=script, stream=True)
         logs = ''
         for chunk in stream:
+            json.loads(chunk)  # ensure chunk is a single, valid JSON blob
             logs += chunk
         self.assertNotEqual(logs, '')
 