mirror of https://github.com/docker/docker-py.git
Universally-compatible reading of chunked streams
Docker introduced newlines in stream output in version 0.9 (https://github.com/dotcloud/docker/pull/4276), but not on all endpoints: POST /images/create, for example, does not include them.

This reverts to the old, less pleasant implementation of _stream_helper(), plus a manual check for newlines, which fixes the problem described in #176 (originally fixed in #184) without reintroducing the accompanying regression. It should work against Docker 0.8, 0.9 and 0.10, both when building and when pulling.
parent 73434476b3
commit 073fdaf671
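For context on what the new reader handles: a chunked-encoded body frames each chunk as a hexadecimal size line, the payload, and a closing \r\n. On Docker 0.9+, some endpoints end the payload with its own newline, so a line-based reader consumes up to that newline and the chunk's closing \r\n is left over as a bare blank line before the next size line; that is the case the manual check skips. Below is a minimal sketch of the loop run against both framings; the wire data is invented for illustration.

import io
import json

def read_chunks(sock):
    """Chunked-transfer reader mirroring the patched _stream_helper() loop."""
    while True:
        size_line = sock.readline()
        if size_line == '\r\n':       # blank line left over from a 0.9+ chunk
            size_line = sock.readline()
        size = int(size_line, 16)     # chunk length is hexadecimal
        if size <= 0:                 # a zero-size chunk ends the stream
            break
        yield sock.readline()         # payload: one newline-terminated JSON blob

# Docker < 0.9: no newline inside the chunk payload (0x10 = 16 bytes).
pre_09 = io.StringIO('10\r\n{"stream": "ok"}\r\n0\r\n\r\n')
# Docker >= 0.9 on some endpoints: payload ends with '\n' (0x11 = 17 bytes),
# so the chunk's closing '\r\n' appears as a bare line before the next size.
post_09 = io.StringIO('11\r\n{"stream": "ok"}\n\r\n0\r\n\r\n')

for wire in (pre_09, post_09):
    for chunk in read_chunks(wire):
        assert json.loads(chunk) == {"stream": "ok"}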
@@ -209,9 +209,23 @@ class Client(requests.Session):
     def _stream_helper(self, response):
         """Generator for data coming from a chunked-encoded HTTP response."""
-        for line in response.iter_lines(chunk_size=32):
-            if line:
-                yield line
+        socket_fp = self._get_raw_response_socket(response)
+        socket_fp.setblocking(1)
+        socket = socket_fp.makefile()
+        while True:
+            # Because Docker introduced newlines at the end of chunks in v0.9,
+            # and only on some API endpoints, we have to cater for both cases.
+            size_line = socket.readline()
+            if size_line == '\r\n':
+                size_line = socket.readline()
+
+            size = int(size_line, 16)
+            if size <= 0:
+                break
+            data = socket.readline()
+            if not data:
+                break
+            yield data
 
     def _multiplexed_buffer_helper(self, response):
         """A generator of multiplexed data blocks read from a buffered
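For callers nothing changes: each yielded chunk is one JSON document regardless of daemon version. A minimal consumer sketch, with the image name assumed for illustration:

import json

import docker

client = docker.Client()  # connects to the local Docker daemon

# Works unchanged against Docker 0.8, 0.9 and 0.10: every chunk
# _stream_helper() yields decodes as exactly one JSON document.
for chunk in client.pull('busybox', stream=True):
    status = json.loads(chunk)
    print(status.get('status', status))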
@@ -14,6 +14,7 @@
 
 import time
 import base64
+import json
 import io
 import os
 import signal
@@ -666,10 +667,8 @@ class TestPullStream(BaseTestCase):
         self.assertIn('Images', info)
         img_count = info['Images']
         stream = self.client.pull('joffrey/test001', stream=True)
-        res = u''
         for chunk in stream:
-            res += chunk
-        self.assertEqual(type(res), six.text_type)
+            json.loads(chunk)  # ensure chunk is a single, valid JSON blob
         self.assertEqual(img_count + 3, self.client.info()['Images'])
         img_info = self.client.inspect_image('joffrey/test001')
         self.assertIn('id', img_info)
@@ -762,6 +761,7 @@ class TestBuildStream(BaseTestCase):
         stream = self.client.build(fileobj=script, stream=True)
         logs = ''
         for chunk in stream:
+            json.loads(chunk)  # ensure chunk is a single, valid JSON blob
             logs += chunk
         self.assertNotEqual(logs, '')
 
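For reference, the chunks the build and pull endpoints emit look roughly like the following (contents invented for illustration); the added json.loads() round-trip is exactly what catches a reader that splits or merges them:

import json

# Illustrative build-stream chunks: one JSON document per chunk.
chunks = [
    '{"stream": "Step 1 : FROM busybox\\n"}',
    '{"stream": " ---> Running in 7d1f10e1cb2d\\n"}',
]
for chunk in chunks:
    json.loads(chunk)  # raises ValueError if a chunk is not one valid blob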