🤖 Lint code
ivy-branch committed Dec 9, 2023
1 parent 329d16c commit e14032d
Showing 55 changed files with 1,684 additions and 1,354 deletions.
16 changes: 8 additions & 8 deletions ivy/functional/frontends/numpy/fft/discrete_fourier_transform.py
@@ -3,6 +3,14 @@
from ivy.func_wrapper import with_unsupported_dtypes


_SWAP_DIRECTION_MAP = {
None: "forward",
"backward": "forward",
"ortho": "ortho",
"forward": "backward",
}


# --- Helpers --- #
# --------------- #

@@ -148,11 +156,3 @@ def rfftfreq(n, d=1.0):
def rfftn(a, s=None, axes=None, norm=None):
a = ivy.asarray(a, dtype=ivy.complex128)
return ivy.rfftn(a, s=s, axes=axes, norm=norm)


_SWAP_DIRECTION_MAP = {
None: "forward",
"backward": "forward",
"ortho": "ortho",
"forward": "backward",
}
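
Note: NumPy's own fft module keeps an identically named table and pairs it with a small lookup helper, so that transforms implemented through their inverses (e.g. hfft via irfft) can flip the norm direction. A minimal sketch of such a helper, assuming the same lookup semantics (the helper name and error message are assumptions, not shown in this diff):

def _swap_direction(norm):
    try:
        return _SWAP_DIRECTION_MAP[norm]
    except KeyError:
        raise ValueError(
            f'Invalid norm value {norm}; should be "backward", "ortho" or "forward".'
        ) from None
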
@@ -83,6 +83,12 @@ def put_along_axis(arr, indices, values, axis):
ivy.put_along_axis(arr, indices, values, axis)


@to_ivy_arrays_and_back
@handle_numpy_out
def take(a, indices, /, *, axis=None, out=None, mode="raise"):
return ivy.take(a, indices, axis=axis, out=out, mode=mode)


@to_ivy_arrays_and_back
def take_along_axis(arr, indices, axis):
return ivy.take_along_axis(arr, indices, axis)
@@ -98,9 +104,3 @@ def tril_indices(n, k=0, m=None):
def unravel_index(indices, shape, order="C"):
ret = [x.astype("int64") for x in ivy.unravel_index(indices, shape)]
return tuple(ret)


@to_ivy_arrays_and_back
@handle_numpy_out
def take(a, indices, /, *, axis=None, out=None, mode="raise"):
return ivy.take(a, indices, axis=axis, out=out, mode=mode)
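
Note: the frontend take added above delegates to ivy.take with NumPy-style arguments. For reference, the plain-NumPy behaviour it is meant to mirror (illustration only, not part of this diff):

import numpy as np

a = np.array([4, 3, 5, 7, 6, 8])
print(np.take(a, [0, 1, 4]))         # [4 3 6]
print(np.take(a, [[0, 1], [2, 3]]))  # [[4 3]
                                     #  [5 7]] -- output shape follows the indices
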
13 changes: 8 additions & 5 deletions ivy/functional/frontends/numpy/ma/MaskedArray.py
@@ -2,6 +2,8 @@
import ivy.functional.frontends.numpy as np_frontend
import numpy as np


masked = True
masked_print_options = "--"
nomask = False

@@ -177,10 +179,12 @@ def _array_in_str(self):
return masked_print_options
return str(self._data.to_list())
if ivy.any(self._mask):
return str([
masked_print_options if mask else x
for x, mask in zip(self._data.to_list(), self._mask.to_list())
])
return str(
[
masked_print_options if mask else x
for x, mask in zip(self._data.to_list(), self._mask.to_list())
]
)
return str(self._data.to_list())


@@ -192,7 +196,6 @@ def _is_masked_array(x):
return isinstance(x, (np.ma.MaskedArray, np_frontend.ma.MaskedArray))


masked = True
# Instance Methods #
# ---------------- #

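Note: the _array_in_str change above only re-wraps a list comprehension for line length. The behaviour it implements mirrors NumPy's masked printing, where masked entries render as the masked_print_options string (plain-NumPy illustration, not the frontend class):

import numpy as np

m = np.ma.MaskedArray([1, 2, 3], mask=[False, True, False])
print(m)  # [1 -- 3]
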
@@ -91,10 +91,12 @@ def _divmod(
ret = ivy.where(
where,
ret,
([
ivy.default(out, ivy.zeros_like(ret[0])),
ivy.default(out, ivy.zeros_like(ret[1])),
]),
(
[
ivy.default(out, ivy.zeros_like(ret[0])),
ivy.default(out, ivy.zeros_like(ret[1])),
]
),
out=out,
)
return ret
@@ -200,7 +200,9 @@ def var(x, /, *, axis=None, ddof=0.0, keepdims=False, out=None, dtype=None, wher
dtype = (
dtype
if dtype is not None
else ivy.float64 if ivy.is_int_dtype(x.dtype) else x.dtype
else ivy.float64
if ivy.is_int_dtype(x.dtype)
else x.dtype
)
ret = ivy.var(x, axis=axis, correction=ddof, keepdims=keepdims, out=out)
ret = (
24 changes: 14 additions & 10 deletions ivy/functional/frontends/numpy/ufunc/methods.py
@@ -214,20 +214,24 @@ def __init__(self, name) -> None:
@property
def nargs(self):
sig = inspect.signature(self.func)
return len([
param
for param in sig.parameters.values()
if param.kind in [param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD]
])
return len(
[
param
for param in sig.parameters.values()
if param.kind in [param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD]
]
)

@property
def nin(self):
sig = inspect.signature(self.func)
return len([
param
for param in sig.parameters.values()
if param.kind == param.POSITIONAL_ONLY
])
return len(
[
param
for param in sig.parameters.values()
if param.kind == param.POSITIONAL_ONLY
]
)

@property
def nout(self):
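
Note: nargs and nin above count parameter kinds taken from inspect.signature of the wrapped function. A self-contained illustration of that counting on a hypothetical function (not from the ivy codebase):

import inspect

def example(a, b, /, c, *, out=None):
    pass

sig = inspect.signature(example)
nin = len([p for p in sig.parameters.values() if p.kind == p.POSITIONAL_ONLY])
nargs = len(
    [
        p
        for p in sig.parameters.values()
        if p.kind in (p.POSITIONAL_ONLY, p.POSITIONAL_OR_KEYWORD)
    ]
)
print(nin, nargs)  # 2 3
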
3 changes: 1 addition & 2 deletions ivy/functional/frontends/paddle/nn/functional/pooling.py
@@ -118,8 +118,7 @@ def max_pool2d(
if data_format not in ["NCHW", "NHWC"]:
raise ValueError(
"Attr(data_format) should be 'NCHW' or 'NHWC'. Received "
"Attr(data_format): %s."
% str(data_format)
"Attr(data_format): %s." % str(data_format)
)

if data_format == "NHWC" and return_mask:
30 changes: 15 additions & 15 deletions ivy/functional/frontends/paddle/nn/functional/vision.py
@@ -18,9 +18,9 @@ def affine_grid(theta, out_shape, align_corners=True):
base_grid[:, :, :, 0] = ivy.linspace(-1, 1, W)
base_grid[:, :, :, 1] = ivy.expand_dims(ivy.linspace(-1, 1, H), axis=-1)
height_values = ivy.expand_dims(ivy.linspace(-1, 1, H), axis=-1)
base_grid[:, :, :, 1] = ivy.array([
[[height_values[i]] * W for i in range(H)]
])[:, :, :, 0]
base_grid[:, :, :, 1] = ivy.array(
[[[height_values[i]] * W for i in range(H)]]
)[:, :, :, 0]
base_grid[:, :, :, 2] = ivy.full((H, W), 1)
grid = ivy.matmul(base_grid.view((N, H * W, 3)), theta.swapaxes(1, 2))
return grid.view((N, H, W, 2))
@@ -32,9 +32,9 @@ def affine_grid(theta, out_shape, align_corners=True):
height_values = ivy.expand_dims(
ivy.linspace(-1, 1, H) * (H - 1) / H, axis=-1
)
base_grid[:, :, :, 1] = ivy.array([
[[height_values[i]] * W for i in range(H)]
])[:, :, :, 0]
base_grid[:, :, :, 1] = ivy.array(
[[[height_values[i]] * W for i in range(H)]]
)[:, :, :, 0]
base_grid[:, :, :, 2] = ivy.full((H, W), 1)
grid = ivy.matmul(base_grid.view((N, H * W, 3)), ivy.swapaxes(theta, 1, 2))
return grid.view((N, H, W, 2))
@@ -45,9 +45,9 @@ def affine_grid(theta, out_shape, align_corners=True):
base_grid[:, :, :, :, 0] = ivy.linspace(-1, 1, W)
base_grid[:, :, :, :, 1] = ivy.expand_dims(ivy.linspace(-1, 1, H), axis=-1)
height_values = ivy.linspace(-1, 1, H)
base_grid[:, :, :, :, 1] = ivy.array([
[[[height_values[i]] * W for i in range(H)]] * D
])
base_grid[:, :, :, :, 1] = ivy.array(
[[[[height_values[i]] * W for i in range(H)]] * D]
)
base_grid[:, :, :, :, 2] = ivy.expand_dims(
ivy.expand_dims(ivy.linspace(-1, 1, D), axis=-1), axis=-1
)
@@ -58,17 +58,17 @@ def affine_grid(theta, out_shape, align_corners=True):
ivy.linspace(-1, 1, H) * (H - 1) / H, axis=-1
)
height_values = ivy.linspace(-1, 1, H) * (H - 1) / H
base_grid[:, :, :, :, 1] = ivy.array([
[[[height_values[i]] * W for i in range(H)]] * D
])
base_grid[:, :, :, :, 1] = ivy.array(
[[[[height_values[i]] * W for i in range(H)]] * D]
)
base_grid[:, :, :, :, 2] = ivy.expand_dims(
ivy.expand_dims(ivy.linspace(-1, 1, D) * (D - 1) / D, axis=-1), axis=-1
)
width_values = ivy.linspace(-1, 1, D) * (D - 1) / D

base_grid[:, :, :, :, 2] = ivy.array([
[ivy.array([[width_values[i]] * W] * H) for i in range(D)]
])
base_grid[:, :, :, :, 2] = ivy.array(
[[ivy.array([[width_values[i]] * W] * H) for i in range(D)]]
)
base_grid[:, :, :, :, 3] = ivy.full((D, H, W), 1)
grid = ivy.matmul(base_grid.view((N, D * H * W, 4)), theta.swapaxes(1, 2))
return grid.view((N, D, H, W, 3))
18 changes: 12 additions & 6 deletions ivy/functional/frontends/sklearn/model_selection/_split.py
@@ -83,10 +83,12 @@ def _iter_test_indices(self, X=None, y=None, groups=None):

n_classes = len(y_idx)
y_order = ivy.sort(y_encoded)
allocation = ivy.asarray([
ivy.bincount(y_order[i :: self.n_splits], minlength=n_classes)
for i in range(self.n_splits)
])
allocation = ivy.asarray(
[
ivy.bincount(y_order[i :: self.n_splits], minlength=n_classes)
for i in range(self.n_splits)
]
)
test_folds = ivy.empty(len(y), dtype="int64")
for k in range(n_classes):
folds_for_class = ivy.arange(self.n_splits).repeat(allocation[:, k])
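
Note: the allocation comprehension above spreads each class as evenly as possible across the folds by sorting the encoded labels and slicing with a stride of n_splits. A plain-NumPy illustration of the same arithmetic on made-up data (not the frontend code path):

import numpy as np

y_encoded = np.array([0, 0, 0, 0, 0, 1, 1, 1, 2, 2])
n_splits, n_classes = 3, 3
y_order = np.sort(y_encoded)
allocation = np.asarray(
    [np.bincount(y_order[i::n_splits], minlength=n_classes) for i in range(n_splits)]
)
print(allocation)
# [[2 1 1]
#  [2 1 0]
#  [1 1 1]]
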
@@ -121,12 +123,16 @@ def train_test_split(
n_train = (
ivy.floor(train_size * n_samples)
if isinstance(train_size, float)
else float(train_size) if isinstance(train_size, int) else None
else float(train_size)
if isinstance(train_size, int)
else None
)
n_test = (
ivy.ceil(test_size * n_samples)
if isinstance(test_size, float)
else float(test_size) if isinstance(test_size, int) else None
else float(test_size)
if isinstance(test_size, int)
else None
)
if train_size is None:
n_train = n_samples - n_test
38 changes: 22 additions & 16 deletions ivy/functional/frontends/sklearn/tree/_criterion.py
@@ -107,27 +107,33 @@ def init(

def reset(self):
self.pos = self.start
self.weighted_n_left, self.weighted_n_right, self.sum_left, self.sum_right = (
_move_sums_classification(
self,
self.sum_left,
self.sum_right,
self.weighted_n_left,
self.weighted_n_right,
)
(
self.weighted_n_left,
self.weighted_n_right,
self.sum_left,
self.sum_right,
) = _move_sums_classification(
self,
self.sum_left,
self.sum_right,
self.weighted_n_left,
self.weighted_n_right,
)
return 0

def reverse_reset(self):
self.pos = self.end
self.weighted_n_right, self.weighted_n_left, self.sum_right, self.sum_left = (
_move_sums_classification(
self,
self.sum_right,
self.sum_left,
self.weighted_n_right,
self.weighted_n_left,
)
(
self.weighted_n_right,
self.weighted_n_left,
self.sum_right,
self.sum_left,
) = _move_sums_classification(
self,
self.sum_right,
self.sum_left,
self.weighted_n_right,
self.weighted_n_left,
)
return 0

27 changes: 14 additions & 13 deletions ivy/functional/frontends/sklearn/tree/_tree.py
@@ -4,10 +4,10 @@
EPSILON = ivy.finfo(ivy.double).eps
INFINITY = ivy.inf
INTPTR_MAX = ivy.iinfo(ivy.int32).max
TREE_UNDEFINED = -2
_TREE_UNDEFINED = TREE_UNDEFINED
TREE_LEAF = -1
TREE_UNDEFINED = -2
_TREE_LEAF = TREE_LEAF
_TREE_UNDEFINED = TREE_UNDEFINED


class Node:
@@ -57,17 +57,19 @@ def _resize_c(self, capacity=INTPTR_MAX):
dtype=ivy.float32,
)
else:
self.value = ivy.concat([
self.value,
ivy.zeros(
(
int(capacity - self.capacity),
int(self.n_outputs),
int(self.max_n_classes),
self.value = ivy.concat(
[
self.value,
ivy.zeros(
(
int(capacity - self.capacity),
int(self.n_outputs),
int(self.max_n_classes),
),
dtype=ivy.float32,
),
dtype=ivy.float32,
),
])
]
)
if capacity < self.node_count:
self.node_count = capacity
self.capacity = capacity
@@ -199,7 +201,6 @@ def __init__(
def build(
self, tree, X, y, sample_weight=None, missing_values_in_feature_mask=None
):

if tree.max_depth <= 10:
init_capacity = int(2 ** (tree.max_depth + 1)) - 1
else:
13 changes: 9 additions & 4 deletions ivy/functional/frontends/tensorflow/keras/metrics.py
@@ -96,10 +96,15 @@ def _top_k(input, topk):

labels = ivy.shape(predictions)[1]
# float comparison?
return ivy.array([
(0 <= res < labels and ivy.min(top_k[ind] - predictions[ind, res]) <= 1e-9)
for ind, res in enumerate(targets)
])
return ivy.array(
[
(
0 <= res < labels
and ivy.min(top_k[ind] - predictions[ind, res]) <= 1e-9
)
for ind, res in enumerate(targets)
]
)

reshape = False
y_true = ivy.array(y_true)
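
Note: the comprehension in this hunk checks, per sample, that the target-class score sits within the top-k scores up to a 1e-9 tolerance. A simplified plain-NumPy sketch of that membership test (illustration only, not the frontend code):

import numpy as np

def in_top_k(predictions, targets, k):
    # k-th largest score per row; a target is a hit when its score is at
    # least that value, minus a small tolerance for float ties
    kth_largest = np.sort(predictions, axis=1)[:, -k]
    target_scores = predictions[np.arange(len(targets)), targets]
    return target_scores >= kth_largest - 1e-9

preds = np.array([[0.1, 0.7, 0.2], [0.5, 0.3, 0.2]])
print(in_top_k(preds, np.array([2, 0]), k=2))  # [ True  True]
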
12 changes: 7 additions & 5 deletions ivy/functional/frontends/tensorflow/linalg.py
@@ -496,11 +496,13 @@ def tridiagonal_solve(
dim = diagonals[0].shape[0]
diagonals[[0, -1], [-1, 0]] = 0
dummy_idx = [0, 0]
indices = ivy.array([
[(i, i + 1) for i in range(dim - 1)] + [dummy_idx],
[(i, i) for i in range(dim)],
[dummy_idx] + [(i + 1, i) for i in range(dim - 1)],
])
indices = ivy.array(
[
[(i, i + 1) for i in range(dim - 1)] + [dummy_idx],
[(i, i) for i in range(dim)],
[dummy_idx] + [(i + 1, i) for i in range(dim - 1)],
]
)
constructed_matrix = ivy.scatter_nd(
indices, diagonals, shape=ivy.array([dim, dim])
)
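
Note: the indices array above scatters the three rows of a compact-format diagonals tensor (superdiagonal, main diagonal, subdiagonal, with the two ignored corner entries routed to a dummy slot that was zeroed out) into a dense dim x dim matrix. A plain-NumPy sketch of the matrix being assembled (illustration only, not the frontend's scatter path):

import numpy as np

def dense_from_compact(diagonals):
    # diagonals: shape (3, dim) -- superdiagonal, main diagonal, subdiagonal;
    # the last superdiagonal entry and the first subdiagonal entry are unused
    dim = diagonals.shape[1]
    m = np.zeros((dim, dim))
    m[np.arange(dim), np.arange(dim)] = diagonals[1]
    m[np.arange(dim - 1), np.arange(1, dim)] = diagonals[0][: dim - 1]
    m[np.arange(1, dim), np.arange(dim - 1)] = diagonals[2][1:]
    return m

d = np.array([[2.0, 2.0, 0.0], [1.0, 1.0, 1.0], [0.0, 3.0, 3.0]])
print(dense_from_compact(d))
# [[1. 2. 0.]
#  [3. 1. 2.]
#  [0. 3. 1.]]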