TYP: Check untyped defs (except vendored) #37556

Merged 69 commits on Nov 9, 2020
Changes from 63 commits
Commits
69 commits
30daa58
[mypy-pandas._testing]
simonjayhawkins Oct 4, 2020
3143012
[mypy-pandas.compat.pickle_compat]
simonjayhawkins Oct 4, 2020
90c9a5f
[mypy-pandas.core.apply]
simonjayhawkins Oct 4, 2020
07f3a9e
[mypy-pandas.core.arrays.base]
simonjayhawkins Oct 4, 2020
9cd6c69
[mypy-pandas.core.arrays.datetimelike]
simonjayhawkins Oct 4, 2020
f3f3103
[mypy-pandas.core.arrays.sparse.array]
simonjayhawkins Oct 4, 2020
ec0fd6c
[mypy-pandas.core.arrays.string_]
simonjayhawkins Oct 4, 2020
4eecd56
[mypy-pandas.core.base]
simonjayhawkins Oct 4, 2020
e9a3905
black fixup
simonjayhawkins Oct 4, 2020
7d13f9b
[mypy-pandas.core.computation.expr]
simonjayhawkins Oct 4, 2020
2684e52
[mypy-pandas.core.computation.expressions]
simonjayhawkins Oct 4, 2020
cf4bc0a
[mypy-pandas.core.computation.ops]
simonjayhawkins Oct 4, 2020
c3bd897
[mypy-pandas.core.computation.pytables]
simonjayhawkins Oct 4, 2020
b97ca7b
[mypy-pandas.core.computation.scope]
simonjayhawkins Oct 4, 2020
da41a72
[mypy-pandas.core.frame]
simonjayhawkins Oct 4, 2020
4a1cdea
[mypy-pandas.core.generic]
simonjayhawkins Oct 4, 2020
2091145
[mypy-pandas.core.groupby.base]
simonjayhawkins Oct 4, 2020
25b0a17
[mypy-pandas.core.groupby.grouper]
simonjayhawkins Oct 4, 2020
2598d53
[mypy-pandas.core.groupby.ops]
simonjayhawkins Oct 4, 2020
464ef35
[mypy-pandas.core.indexes.base]
simonjayhawkins Oct 5, 2020
74b9c36
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 5, 2020
67b28fd
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 6, 2020
c5499e5
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 6, 2020
98f4413
remove unused 'type: ignore' comments
simonjayhawkins Oct 6, 2020
9d73a61
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 7, 2020
c85d090
ignore newly reported errors
simonjayhawkins Oct 7, 2020
c3c65a7
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 7, 2020
3c9f39b
remove unused 'type: ignore' comment
simonjayhawkins Oct 7, 2020
76b3228
[mypy-pandas.core.indexes.category]
simonjayhawkins Oct 7, 2020
0bc418a
[mypy-pandas.core.indexes.datetimelike]
simonjayhawkins Oct 7, 2020
68e63aa
[mypy-pandas.core.indexes.datetimes]
simonjayhawkins Oct 8, 2020
21cdc17
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 8, 2020
00f7a3a
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 8, 2020
ce89fde
[mypy-pandas.core.indexes.extension]
simonjayhawkins Oct 8, 2020
8d84e75
[mypy-pandas.core.indexes.interval]
simonjayhawkins Oct 8, 2020
f7ff3b7
wip
simonjayhawkins Oct 8, 2020
93c8ea6
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 8, 2020
2daf79f
wip
simonjayhawkins Oct 8, 2020
5d91919
io
simonjayhawkins Oct 9, 2020
ae27da3
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 9, 2020
b208a18
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 9, 2020
dcbab31
plotting
simonjayhawkins Oct 9, 2020
951423f
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 10, 2020
3100ad6
add ignore after merge master
simonjayhawkins Oct 10, 2020
aff08e9
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 10, 2020
8760bbc
remove unused 'type: ignore' comments
simonjayhawkins Oct 10, 2020
13426fe
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 10, 2020
b672f75
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 12, 2020
c0bcd34
remove unused 'type: ignore' comments
simonjayhawkins Oct 12, 2020
994780e
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 12, 2020
28f82ba
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 12, 2020
f0d6dea
remove unused 'type: ignore' comments
simonjayhawkins Oct 12, 2020
66df2be
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 12, 2020
ec10939
black fixup
simonjayhawkins Oct 12, 2020
f46179f
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 15, 2020
fe92e38
remove unused 'type: ignore' comments
simonjayhawkins Oct 15, 2020
f26a45a
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 17, 2020
d1f19da
remove unused 'type: ignore' comment
simonjayhawkins Oct 17, 2020
8349b33
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 24, 2020
6b5ef52
remove unused 'type: ignore' comment
simonjayhawkins Oct 24, 2020
92a4aa9
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Oct 31, 2020
4ca8e45
remove unused 'type: ignore' comments
simonjayhawkins Oct 31, 2020
58142c3
tidy diff
simonjayhawkins Oct 31, 2020
2752b25
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Nov 1, 2020
f986ac3
fixup for ci
simonjayhawkins Nov 1, 2020
055d11a
remove assignment to tmp
simonjayhawkins Nov 1, 2020
46a538b
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Nov 8, 2020
0a13156
add ignores to pandas\io\parsers.py
simonjayhawkins Nov 8, 2020
7279f31
Merge remote-tracking branch 'upstream/master' into untyped-defs
simonjayhawkins Nov 9, 2020
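
The bracketed commit messages above name per-module sections of pandas' setup.cfg. A plausible reading of this PR (an assumption drawn from those messages and the file diffs below, not something this excerpt states outright) is that each commit deletes one module's override that had been suppressing check_untyped_defs, so mypy begins checking the bodies of unannotated functions in that module, and the newly reported errors are then silenced with narrowly scoped "# type: ignore[<error-code>]" comments that quote the original error text. A minimal sketch of the kind of override section involved, with only the section name taken from the first commit and the rest assumed:

    ; setup.cfg (sketch, not part of this diff)
    ; while this section is present, mypy skips checking the bodies of
    ; unannotated functions in pandas._testing; deleting the section lets
    ; the module fall back to the global check_untyped_defs setting
    [mypy-pandas._testing]
    check_untyped_defs = False
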
71 changes: 56 additions & 15 deletions pandas/_testing.py
@@ -117,14 +117,24 @@ def set_testing_mode():
# set the testing mode filters
testing_mode = os.environ.get("PANDAS_TESTING_MODE", "None")
if "deprecate" in testing_mode:
warnings.simplefilter("always", _testing_mode_warnings)
# pandas\_testing.py:119: error: Argument 2 to "simplefilter" has
# incompatible type "Tuple[Type[DeprecationWarning],
# Type[ResourceWarning]]"; expected "Type[Warning]"
warnings.simplefilter(
"always", _testing_mode_warnings # type: ignore[arg-type]
)


def reset_testing_mode():
# reset the testing mode filters
testing_mode = os.environ.get("PANDAS_TESTING_MODE", "None")
if "deprecate" in testing_mode:
warnings.simplefilter("ignore", _testing_mode_warnings)
# pandas\_testing.py:126: error: Argument 2 to "simplefilter" has
# incompatible type "Tuple[Type[DeprecationWarning],
# Type[ResourceWarning]]"; expected "Type[Warning]"
warnings.simplefilter(
"ignore", _testing_mode_warnings # type: ignore[arg-type]
)


set_testing_mode()
@@ -241,16 +251,22 @@ def decompress_file(path, compression):
if compression is None:
f = open(path, "rb")
elif compression == "gzip":
f = gzip.open(path, "rb")
# pandas\_testing.py:243: error: Incompatible types in assignment
# (expression has type "IO[Any]", variable has type "BinaryIO")
f = gzip.open(path, "rb") # type: ignore[assignment]
elif compression == "bz2":
f = bz2.BZ2File(path, "rb")
# pandas\_testing.py:245: error: Incompatible types in assignment
# (expression has type "BZ2File", variable has type "BinaryIO")
f = bz2.BZ2File(path, "rb") # type: ignore[assignment]
elif compression == "xz":
f = get_lzma_file(lzma)(path, "rb")
elif compression == "zip":
zip_file = zipfile.ZipFile(path)
zip_names = zip_file.namelist()
if len(zip_names) == 1:
f = zip_file.open(zip_names.pop())
# pandas\_testing.py:252: error: Incompatible types in assignment
# (expression has type "IO[bytes]", variable has type "BinaryIO")
f = zip_file.open(zip_names.pop()) # type: ignore[assignment]
else:
raise ValueError(f"ZIP file {path} error. Only one file per ZIP.")
else:
@@ -286,9 +302,15 @@ def write_to_compressed(compression, path, data, dest="test"):
if compression == "zip":
compress_method = zipfile.ZipFile
elif compression == "gzip":
compress_method = gzip.GzipFile
# pandas\_testing.py:288: error: Incompatible types in assignment
# (expression has type "Type[GzipFile]", variable has type
# "Type[ZipFile]")
compress_method = gzip.GzipFile # type: ignore[assignment]
elif compression == "bz2":
compress_method = bz2.BZ2File
# pandas\_testing.py:290: error: Incompatible types in assignment
# (expression has type "Type[BZ2File]", variable has type
# "Type[ZipFile]")
compress_method = bz2.BZ2File # type: ignore[assignment]
elif compression == "xz":
compress_method = get_lzma_file(lzma)
else:
@@ -300,7 +322,10 @@ def write_to_compressed(compression, path, data, dest="test"):
method = "writestr"
else:
mode = "wb"
args = (data,)
# pandas\_testing.py:302: error: Incompatible types in assignment
# (expression has type "Tuple[Any]", variable has type "Tuple[Any,
# Any]")
args = (data,) # type: ignore[assignment]
method = "write"

with compress_method(path, mode=mode) as f:
@@ -1984,7 +2009,8 @@ def all_timeseries_index_generator(k=10):
"""
make_index_funcs = [makeDateIndex, makePeriodIndex, makeTimedeltaIndex]
for make_index_func in make_index_funcs:
yield make_index_func(k=k)
# pandas\_testing.py:1986: error: Cannot call function of unknown type
yield make_index_func(k=k) # type: ignore[operator]


# make series
@@ -2118,7 +2144,8 @@ def makeCustomIndex(
p=makePeriodIndex,
).get(idx_type)
if idx_func:
idx = idx_func(nentries)
# pandas\_testing.py:2120: error: Cannot call function of unknown type
idx = idx_func(nentries) # type: ignore[operator]
# but we need to fill in the name
if names:
idx.name = names[0]
@@ -2146,7 +2173,8 @@ def keyfunc(x):

# build a list of lists to create the index from
div_factor = nentries // ndupe_l[i] + 1
cnt = Counter()
# pandas\_testing.py:2148: error: Need type annotation for 'cnt'
cnt = Counter() # type: ignore[var-annotated]
for j in range(div_factor):
label = f"{prefix}_l{i}_g{j}"
cnt[label] = ndupe_l[i]
@@ -2304,7 +2332,14 @@ def _gen_unique_rand(rng, _extra_size):

def makeMissingDataframe(density=0.9, random_state=None):
df = makeDataFrame()
i, j = _create_missing_idx(*df.shape, density=density, random_state=random_state)
# pandas\_testing.py:2306: error: "_create_missing_idx" gets multiple
# values for keyword argument "density" [misc]

# pandas\_testing.py:2306: error: "_create_missing_idx" gets multiple
# values for keyword argument "random_state" [misc]
i, j = _create_missing_idx( # type: ignore[misc]
*df.shape, density=density, random_state=random_state
)
df.values[i, j] = np.nan
return df

@@ -2329,7 +2364,10 @@ def dec(f):
is_decorating = not kwargs and len(args) == 1 and callable(args[0])
if is_decorating:
f = args[0]
args = []
# pandas\_testing.py:2331: error: Incompatible types in assignment
# (expression has type "List[<nothing>]", variable has type
# "Tuple[Any, ...]")
args = [] # type: ignore[assignment]
return dec(f)
else:
return dec
@@ -2519,7 +2557,9 @@ def wrapper(*args, **kwargs):
except Exception as err:
errno = getattr(err, "errno", None)
if not errno and hasattr(errno, "reason"):
errno = getattr(err.reason, "errno", None)
# pandas\_testing.py:2521: error: "Exception" has no attribute
# "reason"
errno = getattr(err.reason, "errno", None) # type: ignore[attr-defined]

if errno in skip_errnos:
skip(f"Skipping test due to known errno and error {err}")
@@ -2869,7 +2909,8 @@ def setTZ(tz):
pass
else:
os.environ["TZ"] = tz
time.tzset()
# pandas\_testing.py:2845: error: Module has no attribute "tzset"
time.tzset() # type: ignore[attr-defined]

orig_tz = os.environ.get("TZ")
setTZ(tz)
6 changes: 5 additions & 1 deletion pandas/core/apply.py
@@ -141,7 +141,11 @@ def get_result(self):
""" compute the results """
# dispatch to agg
if is_list_like(self.f) or is_dict_like(self.f):
return self.obj.aggregate(self.f, axis=self.axis, *self.args, **self.kwds)
# pandas\core\apply.py:144: error: "aggregate" of "DataFrame" gets
# multiple values for keyword argument "axis"
return self.obj.aggregate( # type: ignore[misc]
self.f, axis=self.axis, *self.args, **self.kwds
)

# all empty
if len(self.columns) == 0 and len(self.index) == 0:
12 changes: 9 additions & 3 deletions pandas/core/arrays/datetimelike.py
@@ -429,7 +429,9 @@ def _validate_comparison_value(self, other):
raise InvalidComparison(other)

if isinstance(other, self._recognized_scalars) or other is NaT:
other = self._scalar_type(other)
# pandas\core\arrays\datetimelike.py:432: error: Too many arguments
# for "object" [call-arg]
other = self._scalar_type(other) # type: ignore[call-arg]
try:
self._check_compatible_with(other)
except TypeError as err:
@@ -487,14 +489,18 @@ def _validate_shift_value(self, fill_value):
if is_valid_nat_for_dtype(fill_value, self.dtype):
fill_value = NaT
elif isinstance(fill_value, self._recognized_scalars):
fill_value = self._scalar_type(fill_value)
# pandas\core\arrays\datetimelike.py:746: error: Too many arguments
# for "object" [call-arg]
fill_value = self._scalar_type(fill_value) # type: ignore[call-arg]
else:
# only warn if we're not going to raise
if self._scalar_type is Period and lib.is_integer(fill_value):
# kludge for #31971 since Period(integer) tries to cast to str
new_fill = Period._from_ordinal(fill_value, freq=self.freq)
else:
new_fill = self._scalar_type(fill_value)
# pandas\core\arrays\datetimelike.py:753: error: Too many
# arguments for "object" [call-arg]
new_fill = self._scalar_type(fill_value) # type: ignore[call-arg]

# stacklevel here is chosen to be correct when called from
# DataFrame.shift or Series.shift
5 changes: 4 additions & 1 deletion pandas/core/arrays/string_.py
@@ -183,7 +183,10 @@ def __init__(self, values, copy=False):
values = extract_array(values)

super().__init__(values, copy=copy)
self._dtype = StringDtype()
# pandas\core\arrays\string_.py:188: error: Incompatible types in
# assignment (expression has type "StringDtype", variable has type
# "PandasDtype") [assignment]
self._dtype = StringDtype() # type: ignore[assignment]
if not isinstance(values, type(self)):
self._validate()

86 changes: 69 additions & 17 deletions pandas/core/base.py
@@ -95,7 +95,9 @@ def __sizeof__(self):
either a value or Series of values
"""
if hasattr(self, "memory_usage"):
mem = self.memory_usage(deep=True)
# pandas\core\base.py:84: error: "PandasObject" has no attribute
# "memory_usage" [attr-defined]
mem = self.memory_usage(deep=True) # type: ignore[attr-defined]
return int(mem if is_scalar(mem) else mem.sum())

# no memory_usage attribute, so fall back to object's 'sizeof'
@@ -204,32 +206,65 @@ def _selection_list(self):

@cache_readonly
def _selected_obj(self):
if self._selection is None or isinstance(self.obj, ABCSeries):
return self.obj
# pandas\core\base.py:195: error: "SelectionMixin" has no attribute
# "obj" [attr-defined]
if self._selection is None or isinstance(
self.obj, ABCSeries # type: ignore[attr-defined]
):
# pandas\core\base.py:194: error: "SelectionMixin" has no attribute
# "obj" [attr-defined]
return self.obj # type: ignore[attr-defined]
else:
return self.obj[self._selection]
# pandas\core\base.py:204: error: "SelectionMixin" has no attribute
# "obj" [attr-defined]
return self.obj[self._selection] # type: ignore[attr-defined]

@cache_readonly
def ndim(self) -> int:
return self._selected_obj.ndim

@cache_readonly
def _obj_with_exclusions(self):
if self._selection is not None and isinstance(self.obj, ABCDataFrame):
return self.obj.reindex(columns=self._selection_list)
# pandas\core\base.py:209: error: "SelectionMixin" has no attribute
# "obj" [attr-defined]
if self._selection is not None and isinstance(
self.obj, ABCDataFrame # type: ignore[attr-defined]
):
# pandas\core\base.py:217: error: "SelectionMixin" has no attribute
# "obj" [attr-defined]
return self.obj.reindex( # type: ignore[attr-defined]
columns=self._selection_list
)

# pandas\core\base.py:207: error: "SelectionMixin" has no attribute
# "exclusions" [attr-defined]
if len(self.exclusions) > 0: # type: ignore[attr-defined]
# pandas\core\base.py:208: error: "SelectionMixin" has no attribute
# "obj" [attr-defined]

if len(self.exclusions) > 0:
return self.obj.drop(self.exclusions, axis=1)
# pandas\core\base.py:208: error: "SelectionMixin" has no attribute
# "exclusions" [attr-defined]
return self.obj.drop(self.exclusions, axis=1) # type: ignore[attr-defined]
else:
return self.obj
# pandas\core\base.py:210: error: "SelectionMixin" has no attribute
# "obj" [attr-defined]
return self.obj # type: ignore[attr-defined]

def __getitem__(self, key):
if self._selection is not None:
raise IndexError(f"Column(s) {self._selection} already selected")

if isinstance(key, (list, tuple, ABCSeries, ABCIndexClass, np.ndarray)):
if len(self.obj.columns.intersection(key)) != len(key):
bad_keys = list(set(key).difference(self.obj.columns))
# pandas\core\base.py:217: error: "SelectionMixin" has no attribute
# "obj" [attr-defined]
if len(
self.obj.columns.intersection(key) # type: ignore[attr-defined]
) != len(key):
# pandas\core\base.py:218: error: "SelectionMixin" has no
# attribute "obj" [attr-defined]
bad_keys = list(
set(key).difference(self.obj.columns) # type: ignore[attr-defined]
)
raise KeyError(f"Columns not found: {str(bad_keys)[1:-1]}")
return self._gotitem(list(key), ndim=2)

@@ -559,7 +594,11 @@ def to_numpy(self, dtype=None, copy=False, na_value=lib.no_default, **kwargs):
dtype='datetime64[ns]')
"""
if is_extension_array_dtype(self.dtype):
return self.array.to_numpy(dtype, copy=copy, na_value=na_value, **kwargs)
# pandas\core\base.py:837: error: Too many arguments for "to_numpy"
# of "ExtensionArray" [call-arg]
return self.array.to_numpy( # type: ignore[call-arg]
dtype, copy=copy, na_value=na_value, **kwargs
)
elif kwargs:
bad_keys = list(kwargs.keys())[0]
raise TypeError(
@@ -850,8 +889,15 @@ def _map_values(self, mapper, na_action=None):
if is_categorical_dtype(self.dtype):
# use the built in categorical series mapper which saves
# time by mapping the categories instead of all values
self = cast("Categorical", self)
return self._values.map(mapper)

# pandas\core\base.py:893: error: Incompatible types in
# assignment (expression has type "Categorical", variable has
# type "IndexOpsMixin") [assignment]
self = cast("Categorical", self) # type: ignore[assignment]
# pandas\core\base.py:894: error: Item "ExtensionArray" of
# "Union[ExtensionArray, Any]" has no attribute "map"
# [union-attr]
return self._values.map(mapper) # type: ignore[union-attr]

values = self._values

@@ -868,7 +914,9 @@ def _map_values(self, mapper, na_action=None):
raise NotImplementedError
map_f = lambda values, f: values.map(f)
else:
values = self.astype(object)._values
# pandas\core\base.py:1142: error: "IndexOpsMixin" has no attribute
# "astype" [attr-defined]
values = self.astype(object)._values # type: ignore[attr-defined]
if na_action == "ignore":

def map_f(values, f):
@@ -1110,7 +1158,9 @@ def memory_usage(self, deep=False):
are not components of the array if deep=False or if used on PyPy
"""
if hasattr(self.array, "memory_usage"):
return self.array.memory_usage(deep=deep)
# pandas\core\base.py:1379: error: "ExtensionArray" has no
# attribute "memory_usage" [attr-defined]
return self.array.memory_usage(deep=deep) # type: ignore[attr-defined]

v = self.array.nbytes
if deep and is_object_dtype(self) and not PYPY:
@@ -1244,7 +1294,9 @@ def searchsorted(self, value, side="left", sorter=None) -> np.ndarray:

def drop_duplicates(self, keep="first"):
duplicated = self.duplicated(keep=keep)
result = self[np.logical_not(duplicated)]
# pandas\core\base.py:1507: error: Value of type "IndexOpsMixin" is not
# indexable [index]
result = self[np.logical_not(duplicated)] # type: ignore[index]
return result

def duplicated(self, keep="first"):
10 changes: 8 additions & 2 deletions pandas/core/computation/expr.py
@@ -660,7 +660,10 @@ def visit_Call(self, node, side=None, **kwargs):
raise

if res is None:
raise ValueError(f"Invalid function call {node.func.id}")
# pandas\core\computation\expr.py:663: error: "expr" has no
# attribute "id" [attr-defined]
tmp = node.func.id # type: ignore[attr-defined]
raise ValueError(f"Invalid function call {tmp}")
if hasattr(res, "value"):
res = res.value

Expand All @@ -681,7 +684,10 @@ def visit_Call(self, node, side=None, **kwargs):

for key in node.keywords:
if not isinstance(key, ast.keyword):
raise ValueError(f"keyword error in function call '{node.func.id}'")
# pandas\core\computation\expr.py:684: error: "expr" has no
# attribute "id" [attr-defined]
tmp = node.func.id # type: ignore[attr-defined]
raise ValueError(f"keyword error in function call '{tmp}'")

if key.arg:
kwargs[key.arg] = self.visit(key.value).value