mirror of https://github.com/docker/docker-py.git
Move exec_run example to user guides section of docs
Signed-off-by: Joffrey F <joffrey@docker.com>
This commit is contained in:
parent
b2ad302636
commit
16c28093b9
|
@ -181,70 +181,6 @@ class Container(Model):
|
|||
Raises:
|
||||
:py:class:`docker.errors.APIError`
|
||||
If the server returns an error.
|
||||
|
||||
Example:
|
||||
|
||||
Create a container that runs in the background
|
||||
|
||||
>>> client = docker.from_env()
|
||||
>>> container = client.containers.run(
|
||||
... 'bfirsh/reticulate-splines', detach=True)
|
||||
|
||||
Prepare the command we are going to use. It prints "hello stdout"
|
||||
in `stdout`, followed by "hello stderr" in `stderr`:
|
||||
|
||||
>>> cmd = '/bin/sh -c "echo hello stdout ; echo hello stderr >&2"'
|
||||
|
||||
We'll run this command with all four combinations of ``stream``
|
||||
and ``demux``.
|
||||
|
||||
With ``stream=False`` and ``demux=False``, the output is a string
|
||||
that contains both the `stdout` and the `stderr` output:
|
||||
|
||||
>>> res = container.exec_run(cmd, stream=False, demux=False)
|
||||
>>> res.output
|
||||
b'hello stderr\nhello stdout\n'
|
||||
|
||||
With ``stream=True`` and ``demux=False``, the output is a
|
||||
generator that yields strings containing the output of both
|
||||
`stdout` and `stderr`:
|
||||
|
||||
>>> res = container.exec_run(cmd, stream=True, demux=False)
|
||||
>>> next(res.output)
|
||||
b'hello stdout\n'
|
||||
>>> next(res.output)
|
||||
b'hello stderr\n'
|
||||
>>> next(res.output)
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in <module>
|
||||
StopIteration
|
||||
|
||||
With ``stream=True`` and ``demux=True``, the generator now
|
||||
separates the streams and yields tuples
|
||||
``(stdout, stderr)``:
|
||||
|
||||
>>> res = container.exec_run(cmd, stream=True, demux=True)
|
||||
>>> next(res.output)
|
||||
(b'hello stdout\n', None)
|
||||
>>> next(res.output)
|
||||
(None, b'hello stderr\n')
|
||||
>>> next(res.output)
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in <module>
|
||||
StopIteration
|
||||
|
||||
Finally, with ``stream=False`` and ``demux=True``, the whole output
|
||||
is returned, but the streams are still separated:
|
||||
|
||||
>>> res = container.exec_run(cmd, stream=False, demux=True)
|
||||
>>> res.output
|
||||
(b'hello stdout\n', b'hello stderr\n')
|
||||
"""
|
||||
resp = self.client.api.exec_create(
|
||||
self.id, cmd, stdout=stdout, stderr=stderr, stdin=stdin, tty=tty,
|
||||
|
|
|
@ -92,4 +92,5 @@ That's just a taste of what you can do with the Docker SDK for Python. For more,
|
|||
volumes
|
||||
api
|
||||
tls
|
||||
user_guides/index
|
||||
change-log
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
User guides and tutorials
|
||||
=========================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
multiplex
|
||||
swarm_services
|
|
@ -0,0 +1,66 @@
|
|||
Handling multiplexed streams
|
||||
============================
|
||||
|
||||
.. note::
|
||||
The following instructions assume you're interested in getting output from
|
||||
an ``exec`` command. These instructions are similarly applicable to the
|
||||
output of ``attach``.
|
||||
|
||||
First create a container that runs in the background:
|
||||
|
||||
>>> client = docker.from_env()
|
||||
>>> container = client.containers.run(
|
||||
... 'bfirsh/reticulate-splines', detach=True)
|
||||
|
||||
Prepare the command we are going to use. It prints "hello stdout"
|
||||
in `stdout`, followed by "hello stderr" in `stderr`:
|
||||
|
||||
>>> cmd = '/bin/sh -c "echo hello stdout ; echo hello stderr >&2"'
|
||||
We'll run this command with all four combinations of ``stream``
|
||||
and ``demux``.
|
||||
With ``stream=False`` and ``demux=False``, the output is a string
|
||||
that contains both the `stdout` and the `stderr` output:
|
||||
>>> res = container.exec_run(cmd, stream=False, demux=False)
|
||||
>>> res.output
|
||||
b'hello stderr\nhello stdout\n'
|
||||
|
||||
With ``stream=True`` and ``demux=False``, the output is a
|
||||
generator that yields strings containing the output of both
|
||||
`stdout` and `stderr`:
|
||||
|
||||
>>> res = container.exec_run(cmd, stream=True, demux=False)
|
||||
>>> next(res.output)
|
||||
b'hello stdout\n'
|
||||
>>> next(res.output)
|
||||
b'hello stderr\n'
|
||||
>>> next(res.output)
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in <module>
|
||||
StopIteration
|
||||
|
||||
With ``stream=True`` and ``demux=True``, the generator now
|
||||
separates the streams and yields tuples
|
||||
``(stdout, stderr)``:
|
||||
|
||||
>>> res = container.exec_run(cmd, stream=True, demux=True)
|
||||
>>> next(res.output)
|
||||
(b'hello stdout\n', None)
|
||||
>>> next(res.output)
|
||||
(None, b'hello stderr\n')
|
||||
>>> next(res.output)
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in <module>
|
||||
StopIteration
|
||||
|
||||
Finally, with ``stream=False`` and ``demux=True``, the whole output
|
||||
is returned, but the streams are still separated:
|
||||
|
||||
>>> res = container.exec_run(cmd, stream=False, demux=True)
|
||||
>>> res.output
|
||||
(b'hello stdout\n', b'hello stderr\n')
|
|
@ -1,5 +1,9 @@
|
|||
# Swarm services
|
||||
|
||||
> Warning:
|
||||
> This is a stale document and may contain outdated information.
|
||||
> Refer to the API docs for updated classes and method signatures.
|
||||
|
||||
Starting with Engine version 1.12 (API 1.24), it is possible to manage services
|
||||
using the Docker Engine API. Note that the engine needs to be part of a
|
||||
[Swarm cluster](../swarm.rst) before you can use the service-related methods.
|
||||
|
|
Loading…
Reference in New Issue