Merge pull request #210 from aanand/universally-compatible-chunking

Universally-compatible reading of chunked streams
This commit is contained in:
Maxime Petazzoni 2014-05-21 11:06:23 -07:00
commit 55b93d676b
2 changed files with 20 additions and 6 deletions

View File

@@ -220,9 +220,23 @@ class Client(requests.Session):
def _stream_helper(self, response):
    """Generator yielding data chunks from a chunked-encoded HTTP response.

    Reads the raw response socket directly and parses the HTTP chunked
    transfer encoding by hand, rather than relying on
    ``response.iter_lines()`` (the pre-change implementation, removed
    here, which was not universally compatible across urllib3 versions).

    :param response: a ``requests`` response object for a streaming
        (chunked) API call.
    :yields: one raw data chunk (a line of the body) per iteration.
    """
    socket_fp = self._get_raw_response_socket(response)
    # Ensure blocking reads so readline() waits for complete lines.
    socket_fp.setblocking(1)
    socket = socket_fp.makefile()
    while True:
        # Because Docker introduced newlines at the end of chunks in v0.9,
        # and only on some API endpoints, we have to cater for both cases:
        # a bare "\r\n" here is the terminator of the previous chunk, so
        # skip it and read the real size line that follows.
        size_line = socket.readline()
        if size_line == '\r\n':
            size_line = socket.readline()

        # Chunk size is a hexadecimal length prefix; a zero-size chunk
        # marks the end of the stream.
        size = int(size_line, 16)
        if size <= 0:
            break

        # Assumes each chunk is a single newline-terminated blob (true
        # for Docker's streaming endpoints) — NOTE(review): this reads a
        # line, not exactly `size` bytes; confirm against the API.
        data = socket.readline()
        if not data:
            break
        yield data
def _multiplexed_buffer_helper(self, response):
"""A generator of multiplexed data blocks read from a buffered

View File

@@ -14,6 +14,7 @@
import time
import base64
import json
import io
import os
import signal
@@ -674,10 +675,8 @@ class TestPullStream(BaseTestCase):
self.assertIn('Images', info)
img_count = info['Images']
stream = self.client.pull('joffrey/test001', stream=True)
res = u''
for chunk in stream:
res += chunk
self.assertEqual(type(res), six.text_type)
json.loads(chunk) # ensure chunk is a single, valid JSON blob
self.assertEqual(img_count + 3, self.client.info()['Images'])
img_info = self.client.inspect_image('joffrey/test001')
self.assertIn('id', img_info)
@@ -770,6 +769,7 @@ class TestBuildStream(BaseTestCase):
stream = self.client.build(fileobj=script, stream=True)
logs = ''
for chunk in stream:
json.loads(chunk) # ensure chunk is a single, valid JSON blob
logs += chunk
self.assertNotEqual(logs, '')