mirror of https://github.com/docker/docker-py.git
Fix Ruff B904s (be explicit about exception causes)
Signed-off-by: Aarni Koskela <akx@iki.fi>
commit 6aec90a41b
parent 8447f7b0f0
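Ruff's B904 check (inherited from flake8-bugbear) flags a raise inside an except block that neither names the handled exception as the cause (raise ... from err) nor explicitly suppresses it (raise ... from None). A minimal sketch of the before/after pattern applied throughout this commit; the function and message names are illustrative, not code from this repository:

    # Before: triggers B904. The KeyError is chained implicitly, and the
    # traceback says "During handling of the above exception, another
    # exception occurred", which reads like a bug in the error handler.
    def api_version(payload):
        try:
            return payload["ApiVersion"]
        except KeyError:
            raise RuntimeError('response is missing "ApiVersion"')

    # After: the cause is explicit, so the traceback says "The above
    # exception was the direct cause of the following exception".
    # Use "from None" instead when the handled exception is only noise.
    def api_version_fixed(payload):
        try:
            return payload["ApiVersion"]
        except KeyError as ke:
            raise RuntimeError('response is missing "ApiVersion"') from ke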
@@ -160,10 +160,10 @@ class APIClient(
                     base_url, timeout, pool_connections=num_pools,
                     max_pool_size=max_pool_size
                 )
-            except NameError:
+            except NameError as err:
                 raise DockerException(
                     'Install pypiwin32 package to enable npipe:// support'
-                )
+                ) from err
             self.mount('http+docker://', self._custom_adapter)
             self.base_url = 'http+docker://localnpipe'
         elif base_url.startswith('ssh://'):
@@ -172,10 +172,10 @@ class APIClient(
                     base_url, timeout, pool_connections=num_pools,
                     max_pool_size=max_pool_size, shell_out=use_ssh_client
                 )
-            except NameError:
+            except NameError as err:
                 raise DockerException(
                     'Install paramiko package to enable ssh:// support'
-                )
+                ) from err
             self.mount('http+docker://ssh', self._custom_adapter)
             self._unmount('http://', 'https://')
             self.base_url = 'http+docker://ssh'
@@ -211,15 +211,15 @@ class APIClient(
     def _retrieve_server_version(self):
         try:
             return self.version(api_version=False)["ApiVersion"]
-        except KeyError:
+        except KeyError as ke:
             raise DockerException(
                 'Invalid response from docker daemon: key "ApiVersion"'
                 ' is missing.'
-            )
+            ) from ke
         except Exception as e:
             raise DockerException(
                 f'Error while fetching server API version: {e}'
-            )
+            ) from e
 
     def _set_request_timeout(self, kwargs):
         """Prepare the kwargs for an HTTP request by inserting the timeout
@@ -268,7 +268,7 @@ class AuthConfig(dict):
         except credentials.StoreError as e:
             raise errors.DockerException(
                 f'Credentials store error: {repr(e)}'
-            )
+            ) from e
 
     def _get_store_instance(self, name):
         if name not in self._stores:
@@ -114,7 +114,7 @@ class ContextAPI:
                     except Exception as e:
                         raise errors.ContextException(
                             f"Failed to load metafile {filename}: {e}",
-                        )
+                        ) from e
 
         contexts = [cls.DEFAULT_CONTEXT]
         for name in names:
@@ -99,7 +99,7 @@ class Context:
             # unknown format
             raise Exception(
                 f"Detected corrupted meta file for context {name} : {e}"
-            )
+            ) from e
 
         # for docker endpoints, set defaults for
         # Host and SkipTLSVerify fields
@@ -80,14 +80,14 @@ class Store:
                 [self.exe, subcmd], input=data_input, env=env,
             )
         except subprocess.CalledProcessError as e:
-            raise errors.process_store_error(e, self.program)
+            raise errors.process_store_error(e, self.program) from e
         except OSError as e:
             if e.errno == errno.ENOENT:
                 raise errors.StoreError(
                     f'{self.program} not installed or not available in PATH'
-                )
+                ) from e
             else:
                 raise errors.StoreError(
                     f'Unexpected OS error "{e.strerror}", errno={e.errno}'
-                )
+                ) from e
         return output
@@ -47,11 +47,11 @@ class Container(Model):
         try:
             result = self.attrs['Config'].get('Labels')
             return result or {}
-        except KeyError:
+        except KeyError as ke:
             raise DockerException(
                 'Label data is not available for sparse objects. Call reload()'
                 ' to retrieve all information'
-            )
+            ) from ke
 
     @property
     def status(self):
@@ -55,7 +55,7 @@ class TLSConfig:
                 raise errors.TLSParameterError(
                     'client_cert must be a tuple of'
                     ' (client certificate, key file)'
-                )
+                ) from None
 
             if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or
                                               not os.path.isfile(tls_key)):
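This hunk and the connection-pool hunks below use "from None" rather than naming a cause: the handled exception adds nothing for the caller, so its context is suppressed outright instead of being chained. A small illustrative sketch of the effect on the traceback, with a locally defined error class rather than code from this repository:

    # With "from None", the traceback shows only TLSParameterError; the
    # ValueError raised by the failed tuple unpacking is not chained in.
    class TLSParameterError(Exception):
        pass

    def unpack_client_cert(client_cert):
        try:
            cert, key = client_cert
        except ValueError:
            raise TLSParameterError(
                'client_cert must be a tuple of (client certificate, key file)'
            ) from None
        return cert, key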
@@ -46,9 +46,8 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
         conn = None
         try:
             conn = self.pool.get(block=self.block, timeout=timeout)
-
-        except AttributeError:  # self.pool is None
-            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
+        except AttributeError as ae:  # self.pool is None
+            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae
 
         except queue.Empty:
             if self.block:
@@ -56,7 +55,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
                     self,
                     "Pool reached maximum size and no more "
                     "connections are allowed."
-                )
+                ) from None
             # Oh well, we'll create a new connection then
 
         return conn or self._new_conn()
@@ -141,8 +141,8 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
         try:
             conn = self.pool.get(block=self.block, timeout=timeout)
 
-        except AttributeError:  # self.pool is None
-            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
+        except AttributeError as ae:  # self.pool is None
+            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae
 
         except queue.Empty:
             if self.block:
@@ -150,7 +150,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
                     self,
                     "Pool reached maximum size and no more "
                     "connections are allowed."
-                )
+                ) from None
             # Oh well, we'll create a new connection then
 
         return conn or self._new_conn()
@@ -28,9 +28,9 @@ class CancellableStream:
         try:
             return next(self._stream)
         except urllib3.exceptions.ProtocolError:
-            raise StopIteration
+            raise StopIteration from None
         except OSError:
-            raise StopIteration
+            raise StopIteration from None
 
     next = __next__
 
@@ -93,10 +93,10 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
             try:
                 with open(full_path, 'rb') as f:
                     t.addfile(i, f)
-            except OSError:
+            except OSError as oe:
                 raise OSError(
                     f'Can not read file in context: {full_path}'
-                )
+                ) from oe
         else:
             # Directories, FIFOs, symlinks... don't need to be read.
             t.addfile(i, None)
@@ -72,4 +72,4 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
         try:
             yield decoder(buffered)
         except Exception as e:
-            raise StreamParseError(e)
+            raise StreamParseError(e) from e
@@ -414,11 +414,11 @@ def parse_bytes(s):
     if suffix in units.keys() or suffix.isdigit():
         try:
             digits = float(digits_part)
-        except ValueError:
+        except ValueError as ve:
             raise errors.DockerException(
                 'Failed converting the string value for memory '
                 f'({digits_part}) to an integer.'
-            )
+            ) from ve
 
         # Reconvert to long for the final result
         s = int(digits * units[suffix])
@@ -778,8 +778,8 @@ class InMemoryStore(credentials.Store):
     def get(self, server):
         try:
             return self.__store[server]
-        except KeyError:
-            raise credentials.errors.CredentialsNotFound()
+        except KeyError as ke:
+            raise credentials.errors.CredentialsNotFound() from ke
 
     def store(self, server, username, secret):
         self.__store[server] = {