diff --git a/ivy/__init__.py b/ivy/__init__.py
index 620bc5860fbd5..116c5212f8aed 100644
--- a/ivy/__init__.py
+++ b/ivy/__init__.py
@@ -443,8 +443,8 @@ def unknown_shape(rank=None, **kwargs):
     def with_rank(self, rank):
         try:
             return self.merge_with(self.unknown_shape(rank=rank))
-        except ValueError:
-            raise ValueError(f"Shape {self} must have rank {rank}")
+        except ValueError as e:
+            raise ValueError(f"Shape {self} must have rank {rank}") from e

     def with_rank_at_least(self, rank):
         if self.rank is not None and self.rank < rank:
diff --git a/ivy/functional/backends/jax/data_type.py b/ivy/functional/backends/jax/data_type.py
index 5f2a13bb68113..1a84e9ad456e3 100644
--- a/ivy/functional/backends/jax/data_type.py
+++ b/ivy/functional/backends/jax/data_type.py
@@ -133,7 +133,7 @@ def broadcast_arrays(*arrays: JaxArray) -> List[JaxArray]:
     try:
         return jnp.broadcast_arrays(*arrays)
     except ValueError as e:
-        raise ivy.utils.exceptions.IvyBroadcastShapeError(e)
+        raise ivy.utils.exceptions.IvyBroadcastShapeError(e) from e


 def broadcast_to(
diff --git a/ivy/functional/backends/jax/experimental/manipulation.py b/ivy/functional/backends/jax/experimental/manipulation.py
index 0c224e74d090c..c519d7acfd16e 100644
--- a/ivy/functional/backends/jax/experimental/manipulation.py
+++ b/ivy/functional/backends/jax/experimental/manipulation.py
@@ -442,11 +442,11 @@ def take(
     if ivy.exists(axis):
         try:
             x_shape = x.shape[axis]
-        except Exception:
+        except Exception as e:
             raise ValueError(
                 f"axis {axis} is out of bounds for array of dimension"
                 f" {len(x.shape)}"
-            )
+            ) from e
     else:
         x_shape = jnp.prod(x.shape)

diff --git a/ivy/functional/backends/jax/manipulation.py b/ivy/functional/backends/jax/manipulation.py
index 3d3b7c792a21c..30b6f2d9d7055 100644
--- a/ivy/functional/backends/jax/manipulation.py
+++ b/ivy/functional/backends/jax/manipulation.py
@@ -39,7 +39,7 @@ def concat(
     try:
         return jnp.concatenate(xs, axis)
     except ValueError as error:
-        raise ivy.utils.exceptions.IvyIndexError(error)
+        raise ivy.utils.exceptions.IvyIndexError(error) from error


 def expand_dims(
@@ -54,7 +54,7 @@ def expand_dims(
         ret = jnp.expand_dims(x, axis)
         return ret
     except ValueError as error:
-        raise ivy.utils.exceptions.IvyIndexError(error)
+        raise ivy.utils.exceptions.IvyIndexError(error) from error


 def flip(
@@ -143,7 +143,7 @@ def stack(
     try:
         return jnp.stack(arrays, axis=axis)
     except ValueError as error:
-        raise ivy.utils.exceptions.IvyIndexError(error)
+        raise ivy.utils.exceptions.IvyIndexError(error) from error


 # Extra #
diff --git a/ivy/functional/backends/jax/utility.py b/ivy/functional/backends/jax/utility.py
index 8fac7a2afbca6..6d2293ef45913 100644
--- a/ivy/functional/backends/jax/utility.py
+++ b/ivy/functional/backends/jax/utility.py
@@ -19,7 +19,7 @@ def all(
     try:
         return jnp.all(x, axis, keepdims=keepdims)
     except ValueError as error:
-        raise ivy.utils.exceptions.IvyIndexError(error)
+        raise ivy.utils.exceptions.IvyIndexError(error) from error


 def any(
@@ -34,4 +34,4 @@ def any(
     try:
         return jnp.any(x, axis, keepdims=keepdims, out=out)
     except ValueError as error:
-        raise ivy.utils.exceptions.IvyIndexError(error)
+        raise ivy.utils.exceptions.IvyIndexError(error) from error
diff --git a/ivy/functional/backends/numpy/data_type.py b/ivy/functional/backends/numpy/data_type.py
index 0f59bfe7ac0e9..342eed678a793 100644
--- a/ivy/functional/backends/numpy/data_type.py
+++ b/ivy/functional/backends/numpy/data_type.py
@@ -130,7 +130,7 @@ def broadcast_arrays(*arrays: np.ndarray) -> List[np.ndarray]:
     try:
         return np.broadcast_arrays(*arrays)
     except ValueError as e:
-        raise ivy.utils.exceptions.IvyBroadcastShapeError(e)
+        raise ivy.utils.exceptions.IvyBroadcastShapeError(e) from e


 @with_unsupported_dtypes({"1.26.3 and below": ("complex",)}, backend_version)
diff --git a/ivy/functional/backends/numpy/utility.py b/ivy/functional/backends/numpy/utility.py
index de5597f952940..bba2ef86e03a7 100644
--- a/ivy/functional/backends/numpy/utility.py
+++ b/ivy/functional/backends/numpy/utility.py
@@ -17,7 +17,7 @@ def all(
     try:
         return np.asarray(np.all(x, axis=axis, keepdims=keepdims, out=out))
     except np.AxisError as error:
-        raise ivy.utils.exceptions.IvyIndexError(error)
+        raise ivy.utils.exceptions.IvyIndexError(error) from error


 all.support_native_out = True
diff --git a/ivy/functional/backends/paddle/experimental/manipulation.py b/ivy/functional/backends/paddle/experimental/manipulation.py
index 61fdaae94fd93..1193133ae8366 100644
--- a/ivy/functional/backends/paddle/experimental/manipulation.py
+++ b/ivy/functional/backends/paddle/experimental/manipulation.py
@@ -774,7 +774,7 @@ def take(
     if ivy.exists(axis):
         try:
             x_shape = x.shape[axis]
-        except Exception:
+        except Exception as e:
             rank = len(x.shape)
             raise IndexError(
                 "(OutOfRange) Attr(axis) is out of range, "
@@ -784,7 +784,7 @@ def take(
                 "(0 - input_dim.size()) == true, "
                 "but received axis < input_dim.size() && axis >= "
                 "(0 - input_dim.size()):0 != true:1.]"
-            )
+            ) from e
     else:
         x_shape = paddle.prod(paddle.to_tensor(x.shape))

diff --git a/ivy/functional/backends/paddle/experimental/norms.py b/ivy/functional/backends/paddle/experimental/norms.py
index c36bf0dda786f..092dda34030b6 100644
--- a/ivy/functional/backends/paddle/experimental/norms.py
+++ b/ivy/functional/backends/paddle/experimental/norms.py
@@ -55,11 +55,11 @@ def batch_norm(
             if data_format[-1] == "C"
             else data_formats[0:4][x.ndim - 2]
         )
-    except IndexError:
+    except IndexError as e:
         raise IndexError(
             "data_format must be one of 'NC', 'NCL', 'NCHW', 'NCDHW', 'NLC', 'NHWC',"
             f" 'NDHWC' but receive {data_format}"
-        )
+        ) from e

     with ivy.ArrayMode(False):
         if training:
diff --git a/ivy/functional/backends/tensorflow/data_type.py b/ivy/functional/backends/tensorflow/data_type.py
index 8b7209debc852..558120c4af568 100644
--- a/ivy/functional/backends/tensorflow/data_type.py
+++ b/ivy/functional/backends/tensorflow/data_type.py
@@ -117,7 +117,7 @@ def broadcast_arrays(
     try:
         desired_shape = tf.broadcast_dynamic_shape(arrays[0].shape, arrays[1].shape)
     except tf.errors.InvalidArgumentError as e:
-        raise ivy.utils.exceptions.IvyBroadcastShapeError(e)
+        raise ivy.utils.exceptions.IvyBroadcastShapeError(e) from e
     if len(arrays) > 2:
         for i in range(2, len(arrays)):
             try:
@@ -125,7 +125,7 @@ def broadcast_arrays(
                     desired_shape, arrays[i].shape
                 )
             except tf.errors.InvalidArgumentError as e:
-                raise ivy.utils.exceptions.IvyBroadcastShapeError(e)
+                raise ivy.utils.exceptions.IvyBroadcastShapeError(e) from e
     else:
         return [arrays[0]]
     result = []
diff --git a/ivy/functional/backends/tensorflow/manipulation.py b/ivy/functional/backends/tensorflow/manipulation.py
index e2d6f8f13a2be..5e35641ab3c26 100644
--- a/ivy/functional/backends/tensorflow/manipulation.py
+++ b/ivy/functional/backends/tensorflow/manipulation.py
@@ -66,7 +66,7 @@ def expand_dims(
         ret = tf.reshape(x, shape=out_shape)
         return ret
     except (tf.errors.InvalidArgumentError, np.AxisError) as error:
-        raise ivy.utils.exceptions.IvyIndexError(error)
+        raise ivy.utils.exceptions.IvyIndexError(error) from error


 def flip(
@@ -196,7 +196,7 @@ def stack(
     try:
         return tf.experimental.numpy.stack(arrays, axis)
     except ValueError as e:
-        raise ivy.utils.exceptions.IvyIndexError(e)
+        raise ivy.utils.exceptions.IvyIndexError(e) from e


 # Extra #
diff --git a/ivy/functional/backends/tensorflow/utility.py b/ivy/functional/backends/tensorflow/utility.py
index 997b6dca682dc..a09dedb163baa 100644
--- a/ivy/functional/backends/tensorflow/utility.py
+++ b/ivy/functional/backends/tensorflow/utility.py
@@ -24,7 +24,7 @@ def all(
     try:
         return tf.reduce_all(tf.cast(x, tf.bool), axis=axis, keepdims=keepdims)
     except tf.errors.InvalidArgumentError as e:
-        raise ivy.utils.exceptions.IvyIndexError(e)
+        raise ivy.utils.exceptions.IvyIndexError(e) from e


 def any(
@@ -44,4 +44,4 @@ def any(
     try:
         return tf.reduce_any(tf.cast(x, tf.bool), axis=axis, keepdims=keepdims)
     except tf.errors.InvalidArgumentError as e:
-        raise ivy.utils.exceptions.IvyIndexError(e)
+        raise ivy.utils.exceptions.IvyIndexError(e) from e
diff --git a/ivy/functional/backends/torch/data_type.py b/ivy/functional/backends/torch/data_type.py
index b650590e6c290..db70c7e023852 100644
--- a/ivy/functional/backends/torch/data_type.py
+++ b/ivy/functional/backends/torch/data_type.py
@@ -91,7 +91,7 @@ def broadcast_arrays(*arrays: torch.Tensor) -> List[torch.Tensor]:
     try:
         return list(torch.broadcast_tensors(*arrays))
     except RuntimeError as e:
-        raise ivy.utils.exceptions.IvyBroadcastShapeError(e)
+        raise ivy.utils.exceptions.IvyBroadcastShapeError(e) from e


 def broadcast_to(
diff --git a/ivy/functional/backends/torch/experimental/manipulation.py b/ivy/functional/backends/torch/experimental/manipulation.py
index 14265cebd1eea..7071e63922e41 100644
--- a/ivy/functional/backends/torch/experimental/manipulation.py
+++ b/ivy/functional/backends/torch/experimental/manipulation.py
@@ -550,13 +550,13 @@ def take(
     if ivy.exists(axis):
         try:
             x_shape = x.shape[axis]
-        except Exception:
+        except Exception as e:
             rank = len(x.shape)
             raise IndexError(
                 "IndexError: Dimension out of range"
                 f"(expected to be in range of[-{rank}, {rank-1}]"
                 f", but got {axis})"
-            )
+            ) from e
     else:
         x_shape = torch.prod(torch.tensor(x.shape))

diff --git a/ivy/functional/backends/torch/general.py b/ivy/functional/backends/torch/general.py
index 2fb22fc355458..af88439c9ee57 100644
--- a/ivy/functional/backends/torch/general.py
+++ b/ivy/functional/backends/torch/general.py
@@ -387,10 +387,10 @@ def scatter_flat(
     if torch_scatter is None:
         try:
             import torch_scatter as torch_scatter
-        except ImportError:
+        except ImportError as e:
             raise ivy.utils.exceptions.IvyException(
                 "Unable to import torch_scatter, verify this is correctly installed."
-            )
+            ) from e
     if reduction == "replace":
         output[indices.type(torch.int64)] = updates
         res = output
@@ -478,10 +478,10 @@ def scatter_nd(
     if torch_scatter is None:
         try:
             import torch_scatter as torch_scatter
-        except ImportError:
+        except ImportError as e:
             raise ivy.utils.exceptions.IvyException(
                 "Unable to import torch_scatter, verify this is correctly installed."
-            )
+            ) from e
     if reduction == "replace":
         flat_output[flat_indices_for_flat] = flat_updates
         flat_scatter = flat_output
diff --git a/ivy/functional/frontends/jax/numpy/__init__.py b/ivy/functional/frontends/jax/numpy/__init__.py
index 4b8a28a0a6ed9..2512624217a74 100644
--- a/ivy/functional/frontends/jax/numpy/__init__.py
+++ b/ivy/functional/frontends/jax/numpy/__init__.py
@@ -407,8 +407,10 @@ def promote_types_jax(
     """
     try:
         ret = jax_promotion_table[(ivy.as_ivy_dtype(type1), ivy.as_ivy_dtype(type2))]
-    except KeyError:
-        raise ivy.utils.exceptions.IvyException("these dtypes are not type promotable")
+    except KeyError as e:
+        raise ivy.utils.exceptions.IvyException(
+            "these dtypes are not type promotable"
+        ) from e
     return ret


diff --git a/ivy/functional/frontends/mxnet/numpy/__init__.py b/ivy/functional/frontends/mxnet/numpy/__init__.py
index 1f8fb0f1393f8..cc7390eadec51 100644
--- a/ivy/functional/frontends/mxnet/numpy/__init__.py
+++ b/ivy/functional/frontends/mxnet/numpy/__init__.py
@@ -121,8 +121,10 @@ def promote_types_mxnet(
     """
     try:
         ret = mxnet_promotion_table[(ivy.as_ivy_dtype(type1), ivy.as_ivy_dtype(type2))]
-    except KeyError:
-        raise ivy.utils.exceptions.IvyException("these dtypes are not type promotable")
+    except KeyError as e:
+        raise ivy.utils.exceptions.IvyException(
+            "these dtypes are not type promotable"
+        ) from e
     return ret


diff --git a/ivy/functional/frontends/numpy/__init__.py b/ivy/functional/frontends/numpy/__init__.py
index 9cdf0c1278021..2abc591a2ddcf 100644
--- a/ivy/functional/frontends/numpy/__init__.py
+++ b/ivy/functional/frontends/numpy/__init__.py
@@ -425,8 +425,10 @@ def promote_numpy_dtypes(
     type1, type2 = ivy.as_ivy_dtype(type1), ivy.as_ivy_dtype(type2)
     try:
         return numpy_promotion_table[(type1, type2)]
-    except KeyError:
-        raise ivy.utils.exceptions.IvyException("these dtypes are not type promotable")
+    except KeyError as e:
+        raise ivy.utils.exceptions.IvyException(
+            "these dtypes are not type promotable"
+        ) from e


 @handle_exceptions
diff --git a/ivy/functional/frontends/numpy/data_type_routines/creating_data_types.py b/ivy/functional/frontends/numpy/data_type_routines/creating_data_types.py
index bab1425b65a5c..84bd99b6b71d6 100644
--- a/ivy/functional/frontends/numpy/data_type_routines/creating_data_types.py
+++ b/ivy/functional/frontends/numpy/data_type_routines/creating_data_types.py
@@ -19,11 +19,11 @@ def __repr__(self):
     def __ge__(self, other):
         try:
             other = dtype(other)
-        except TypeError:
+        except TypeError as e:
             raise ivy.utils.exceptions.IvyException(
                 "Attempted to compare a dtype with something which"
                 "couldn't be interpreted as a dtype"
-            )
+            ) from e

         return self == np_frontend.promote_numpy_dtypes(
             self._ivy_dtype, other._ivy_dtype
@@ -32,22 +32,22 @@ def __ge__(self, other):
     def __gt__(self, other):
         try:
             other = dtype(other)
-        except TypeError:
+        except TypeError as e:
             raise ivy.utils.exceptions.IvyException(
                 "Attempted to compare a dtype with something which"
                 "couldn't be interpreted as a dtype"
-            )
+            ) from e

         return self >= other and self != other

     def __lt__(self, other):
         try:
             other = dtype(other)
-        except TypeError:
+        except TypeError as e:
             raise ivy.utils.exceptions.IvyException(
                 "Attempted to compare a dtype with something which"
                 "couldn't be interpreted as a dtype"
-            )
+            ) from e

         return self != np_frontend.promote_numpy_dtypes(
             self._ivy_dtype, other._ivy_dtype
@@ -56,11 +56,11 @@ def __lt__(self, other):
     def __le__(self, other):
         try:
             other = dtype(other)
-        except TypeError:
+        except TypeError as e:
             raise ivy.utils.exceptions.IvyException(
                 "Attempted to compare a dtype with something which"
                 "couldn't be interpreted as a dtype"
-            )
+            ) from e

         return self < other or self == other
diff --git a/ivy/functional/frontends/onnx/__init__.py b/ivy/functional/frontends/onnx/__init__.py
index 46a5fb5daad61..1eafc7e63a158 100644
--- a/ivy/functional/frontends/onnx/__init__.py
+++ b/ivy/functional/frontends/onnx/__init__.py
@@ -208,8 +208,10 @@ def promote_types_onnx(
     """
     try:
         ret = onnx_promotion_table[(ivy.as_ivy_dtype(type1), ivy.as_ivy_dtype(type2))]
-    except KeyError:
-        raise ivy.utils.exceptions.IvyException("these dtypes are not type promotable")
+    except KeyError as e:
+        raise ivy.utils.exceptions.IvyException(
+            "these dtypes are not type promotable"
+        ) from e
     return ret


diff --git a/ivy/functional/frontends/paddle/__init__.py b/ivy/functional/frontends/paddle/__init__.py
index 58ae61580f139..33518fbaf3905 100644
--- a/ivy/functional/frontends/paddle/__init__.py
+++ b/ivy/functional/frontends/paddle/__init__.py
@@ -173,8 +173,10 @@ def promote_types_paddle(
     """
     try:
         ret = paddle_promotion_table[(ivy.as_ivy_dtype(type1), ivy.as_ivy_dtype(type2))]
-    except KeyError:
-        raise ivy.utils.exceptions.IvyException("these dtypes are not type promotable")
+    except KeyError as e:
+        raise ivy.utils.exceptions.IvyException(
+            "these dtypes are not type promotable"
+        ) from e
     return ret


diff --git a/ivy/functional/frontends/torch/__init__.py b/ivy/functional/frontends/torch/__init__.py
index d502cb7e90d50..af031aa7c9d26 100644
--- a/ivy/functional/frontends/torch/__init__.py
+++ b/ivy/functional/frontends/torch/__init__.py
@@ -210,8 +210,10 @@ def promote_types_torch(
         ret = torch_frontend.torch_promotion_table[
             (ivy.as_ivy_dtype(type1), ivy.as_ivy_dtype(type2))
         ]
-    except KeyError:
-        raise ivy.utils.exceptions.IvyException("these dtypes are not type promotable")
+    except KeyError as e:
+        raise ivy.utils.exceptions.IvyException(
+            "these dtypes are not type promotable"
+        ) from e
     return ret


diff --git a/ivy/functional/frontends/torch/linalg.py b/ivy/functional/frontends/torch/linalg.py
index d34c0c5c09e2b..67d662d72f97e 100644
--- a/ivy/functional/frontends/torch/linalg.py
+++ b/ivy/functional/frontends/torch/linalg.py
@@ -23,7 +23,7 @@ def cholesky_ex(input, *, upper=False, check_errors=False, out=None):
         return matrix, info
     except RuntimeError as e:
         if check_errors:
-            raise RuntimeError(e)
+            raise RuntimeError(e) from e
         else:
             matrix = input * math.nan
             info = ivy.ones(input.shape[:-2], dtype=ivy.int32)
@@ -292,7 +292,7 @@ def solve_ex(A, B, *, left=True, check_errors=False, out=None):
         return result, info
     except RuntimeError as e:
         if check_errors:
-            raise RuntimeError(e)
+            raise RuntimeError(e) from e
         else:
             result = A * math.nan
             info = ivy.ones(A.shape[:-2], dtype=ivy.int32)
diff --git a/ivy/functional/frontends/torch/tensor.py b/ivy/functional/frontends/torch/tensor.py
index 53f47aea2adef..e462fb61c736c 100644
--- a/ivy/functional/frontends/torch/tensor.py
+++ b/ivy/functional/frontends/torch/tensor.py
@@ -814,11 +814,11 @@ def size(self, dim=None):
             return shape
         try:
             return shape[dim]
-        except IndexError:
+        except IndexError as e:
            raise IndexError(
                f"Dimension out of range (expected to be in range of [{len(shape)},"
                f" {len(shape) - 1}], but got {dim}"
-            )
+            ) from e

     def matmul(self, other):
         return torch_frontend.matmul(self, other)
@@ -2290,8 +2290,10 @@ def __new__(cls, iterable=()):
                 continue
             try:
                 new_iterable.append(int(item))
-            except Exception:
-                raise TypeError(f"Expected int, but got {type(item)} at index {i}")
+            except Exception as e:
+                raise TypeError(
+                    f"Expected int, but got {type(item)} at index {i}"
+                ) from e
         return super().__new__(cls, tuple(new_iterable))

     def __init__(self, shape) -> None:
diff --git a/ivy/functional/ivy/experimental/linear_algebra.py b/ivy/functional/ivy/experimental/linear_algebra.py
index dd0685bead55b..46ff186bd66bd 100644
--- a/ivy/functional/ivy/experimental/linear_algebra.py
+++ b/ivy/functional/ivy/experimental/linear_algebra.py
@@ -1385,11 +1385,11 @@ def initialize_tucker(
     """
     try:
         assert len(x.shape) >= 2
-    except ValueError:
+    except ValueError as e:
         raise ValueError(
             "expected x to have at least 2 dimensions but it has only"
             f" {len(x.shape)} dimension(s)"
-        )
+        ) from e

     # Initialisation
     if init == "svd":
@@ -1667,11 +1667,11 @@ def tucker(
     if fixed_factors:
         try:
             (core, factors) = init
-        except ValueError:
+        except ValueError as e:
             raise ValueError(
                 f"Got fixed_factor={fixed_factors} but no appropriate Tucker tensor was"
                 ' passed for "init".'
-            )
+            ) from e

         if len(fixed_factors) == len(factors):
             return ivy.TuckerTensor((core, factors))
diff --git a/ivy/stateful/sequential.py b/ivy/stateful/sequential.py
index 08bf1a1f56465..684f698a0dee1 100644
--- a/ivy/stateful/sequential.py
+++ b/ivy/stateful/sequential.py
@@ -34,13 +34,13 @@ def __init__(
         for i, submod in enumerate(sub_modules):
             try:
                 submod.v = v["submodules"][f"v{str(i)}"]
-            except KeyError:
+            except KeyError as e:
                 if submod.v:
                     raise ivy.utils.exceptions.IvyException(
                         "variables v passed to Sequential class must have key "
                         "chains in the form of "
                         '"submodules/v{}", where {} is an idx'
-                    )
+                    ) from e
         self._submodules = list(sub_modules)
         Module.__init__(self, device=device, v=v, dtype=dtype)

@@ -64,13 +64,13 @@ def _forward(self, inputs):
         for i, submod in enumerate(self._submodules):
             try:
                 x = submod(x, v=self.v.submodules[f"v{str(i)}"])
-            except KeyError:
+            except KeyError as e:
                 if submod.v:
                     raise ivy.utils.exceptions.IvyException(
                         "variables v passed to Sequential class must have key chains "
                         "in the form of "
                         '"submodules/v{}", where {} is an idx'
-                    )
+                    ) from e
                 x = submod(x)
         return x