mirror of https://github.com/docker/docker-py.git
Fix Ruff B904s (be explicit about exception causes)
Signed-off-by: Aarni Koskela <akx@iki.fi>
parent 8447f7b0f0
commit 6aec90a41b
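The pattern applied throughout the diff below is the one Ruff's B904 rule asks for: when re-raising inside an except block, bind the caught exception to a name and chain it with "raise ... from", or use "from None" to suppress the context deliberately. A minimal sketch of both forms, using hypothetical names (ConfigError, load_port) that are not part of docker-py:

# Sketch of the B904 fix pattern; names are illustrative only.
class ConfigError(Exception):
    pass

def load_port(settings: dict) -> int:
    try:
        return int(settings["port"])
    except KeyError as ke:
        # Chain the original KeyError as the explicit cause (B904-compliant).
        raise ConfigError('key "port" is missing') from ke
    except ValueError:
        # Or suppress the context when the original error adds no useful detail.
        raise ConfigError("port must be an integer") from None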
@@ -160,10 +160,10 @@ class APIClient(
                     base_url, timeout, pool_connections=num_pools,
                     max_pool_size=max_pool_size
                 )
-            except NameError:
+            except NameError as err:
                 raise DockerException(
                     'Install pypiwin32 package to enable npipe:// support'
-                )
+                ) from err
             self.mount('http+docker://', self._custom_adapter)
             self.base_url = 'http+docker://localnpipe'
         elif base_url.startswith('ssh://'):
@@ -172,10 +172,10 @@ class APIClient(
                     base_url, timeout, pool_connections=num_pools,
                     max_pool_size=max_pool_size, shell_out=use_ssh_client
                 )
-            except NameError:
+            except NameError as err:
                 raise DockerException(
                     'Install paramiko package to enable ssh:// support'
-                )
+                ) from err
             self.mount('http+docker://ssh', self._custom_adapter)
             self._unmount('http://', 'https://')
             self.base_url = 'http+docker://ssh'
@@ -211,15 +211,15 @@ class APIClient(
     def _retrieve_server_version(self):
         try:
             return self.version(api_version=False)["ApiVersion"]
-        except KeyError:
+        except KeyError as ke:
             raise DockerException(
                 'Invalid response from docker daemon: key "ApiVersion"'
                 ' is missing.'
-            )
+            ) from ke
         except Exception as e:
             raise DockerException(
                 f'Error while fetching server API version: {e}'
-            )
+            ) from e

     def _set_request_timeout(self, kwargs):
         """Prepare the kwargs for an HTTP request by inserting the timeout
@@ -268,7 +268,7 @@ class AuthConfig(dict):
         except credentials.StoreError as e:
             raise errors.DockerException(
                 f'Credentials store error: {repr(e)}'
-            )
+            ) from e

     def _get_store_instance(self, name):
         if name not in self._stores:
@@ -114,7 +114,7 @@ class ContextAPI:
                     except Exception as e:
                         raise errors.ContextException(
                             f"Failed to load metafile {filename}: {e}",
-                        )
+                        ) from e

         contexts = [cls.DEFAULT_CONTEXT]
         for name in names:
@@ -99,7 +99,7 @@ class Context:
             # unknown format
             raise Exception(
                 f"Detected corrupted meta file for context {name} : {e}"
-            )
+            ) from e

         # for docker endpoints, set defaults for
         # Host and SkipTLSVerify fields
@@ -80,14 +80,14 @@ class Store:
                 [self.exe, subcmd], input=data_input, env=env,
             )
         except subprocess.CalledProcessError as e:
-            raise errors.process_store_error(e, self.program)
+            raise errors.process_store_error(e, self.program) from e
         except OSError as e:
             if e.errno == errno.ENOENT:
                 raise errors.StoreError(
                     f'{self.program} not installed or not available in PATH'
-                )
+                ) from e
             else:
                 raise errors.StoreError(
                     f'Unexpected OS error "{e.strerror}", errno={e.errno}'
-                )
+                ) from e
         return output
@@ -47,11 +47,11 @@ class Container(Model):
         try:
             result = self.attrs['Config'].get('Labels')
             return result or {}
-        except KeyError:
+        except KeyError as ke:
             raise DockerException(
                 'Label data is not available for sparse objects. Call reload()'
                 ' to retrieve all information'
-            )
+            ) from ke

     @property
     def status(self):
|
@ -55,7 +55,7 @@ class TLSConfig:
|
|||
raise errors.TLSParameterError(
|
||||
'client_cert must be a tuple of'
|
||||
' (client certificate, key file)'
|
||||
)
|
||||
) from None
|
||||
|
||||
if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or
|
||||
not os.path.isfile(tls_key)):
|
||||
|
|
|
@@ -46,9 +46,8 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
         conn = None
         try:
             conn = self.pool.get(block=self.block, timeout=timeout)

-        except AttributeError:  # self.pool is None
-            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
+        except AttributeError as ae:  # self.pool is None
+            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae

         except queue.Empty:
             if self.block:
@@ -56,7 +55,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
                     self,
                     "Pool reached maximum size and no more "
                     "connections are allowed."
-                )
+                ) from None
             # Oh well, we'll create a new connection then

         return conn or self._new_conn()
@@ -141,8 +141,8 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
         try:
             conn = self.pool.get(block=self.block, timeout=timeout)

-        except AttributeError:  # self.pool is None
-            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
+        except AttributeError as ae:  # self.pool is None
+            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae

         except queue.Empty:
             if self.block:
@@ -150,7 +150,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
                     self,
                     "Pool reached maximum size and no more "
                     "connections are allowed."
-                )
+                ) from None
             # Oh well, we'll create a new connection then

         return conn or self._new_conn()
@@ -28,9 +28,9 @@ class CancellableStream:
         try:
             return next(self._stream)
         except urllib3.exceptions.ProtocolError:
-            raise StopIteration
+            raise StopIteration from None
         except OSError:
-            raise StopIteration
+            raise StopIteration from None

     next = __next__

@@ -93,10 +93,10 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
             try:
                 with open(full_path, 'rb') as f:
                     t.addfile(i, f)
-            except OSError:
+            except OSError as oe:
                 raise OSError(
                     f'Can not read file in context: {full_path}'
-                )
+                ) from oe
         else:
             # Directories, FIFOs, symlinks... don't need to be read.
             t.addfile(i, None)
@@ -72,4 +72,4 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
         try:
             yield decoder(buffered)
         except Exception as e:
-            raise StreamParseError(e)
+            raise StreamParseError(e) from e
@@ -414,11 +414,11 @@ def parse_bytes(s):
     if suffix in units.keys() or suffix.isdigit():
         try:
             digits = float(digits_part)
-        except ValueError:
+        except ValueError as ve:
             raise errors.DockerException(
                 'Failed converting the string value for memory '
                 f'({digits_part}) to an integer.'
-            )
+            ) from ve

         # Reconvert to long for the final result
         s = int(digits * units[suffix])
@@ -778,8 +778,8 @@ class InMemoryStore(credentials.Store):
     def get(self, server):
         try:
             return self.__store[server]
-        except KeyError:
-            raise credentials.errors.CredentialsNotFound()
+        except KeyError as ke:
+            raise credentials.errors.CredentialsNotFound() from ke

     def store(self, server, username, secret):
         self.__store[server] = {
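What the explicit chaining buys, sketched outside the diff: with "raise ... from err" the original exception is kept on __cause__ and the traceback reports it as the direct cause of the new one, instead of the generic "During handling of the above exception, another exception occurred" message, while "from None" hides the context entirely. A small illustrative check, not part of this commit:

# Sketch: the "from" clause sets __cause__ and __suppress_context__.
try:
    try:
        {}["missing"]
    except KeyError as ke:
        raise RuntimeError("lookup failed") from ke
except RuntimeError as err:
    print(type(err.__cause__).__name__)   # KeyError
    print(err.__suppress_context__)       # True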