Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use ruff for formatting #8761

Draft
wants to merge 4 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 3 additions & 13 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,22 +13,12 @@ repos:
- id: mixed-line-ending
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: 'v0.2.0'
rev: 'v0.2.2'
hooks:
- id: ruff
args: ["--fix", "--show-fixes"]
# https://github.com/python/black#version-control-integration
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.1.1
hooks:
- id: black-jupyter
- repo: https://github.com/keewis/blackdoc
rev: v0.3.9
hooks:
- id: blackdoc
exclude: "generate_aggregations.py"
additional_dependencies: ["black==24.1.1"]
- id: blackdoc-autoupdate-black
- id: ruff-format
types_or: [ python, pyi, jupyter ]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.8.0
hooks:
Expand Down
1 change: 0 additions & 1 deletion asv_bench/benchmarks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,6 @@ def _skip_slow():
>>> from . import _skip_slow
>>> def time_something_slow():
... pass
...
>>> time_something.setup = _skip_slow
"""
if os.environ.get("ASV_SKIP_SLOW", "0") == "1":
Expand Down
3 changes: 2 additions & 1 deletion doc/examples/apply_ufunc_vectorize_1d.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,8 @@
"interped = interped.rename({\"new_lat\": \"lat\"})\n",
"interped[\"lat\"] = newlat # need to add this manually\n",
"xr.testing.assert_allclose(\n",
" expected.transpose(*interped.dims), interped # order of dims is different\n",
" expected.transpose(*interped.dims),\n",
" interped, # order of dims is different\n",
")\n",
"interped"
]
Expand Down
18 changes: 12 additions & 6 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -86,8 +86,8 @@ exclude_lines = ["pragma: no cover", "if TYPE_CHECKING"]
[tool.mypy]
enable_error_code = "redundant-self"
exclude = [
'xarray/util/generate_.*\.py',
'xarray/datatree_/.*\.py',
'xarray/util/generate_.*\.py',
'xarray/datatree_/.*\.py',
]
files = "xarray"
show_error_codes = true
Expand Down Expand Up @@ -247,7 +247,6 @@ reportMissingTypeStubs = false
[tool.ruff]
builtins = ["ellipsis"]
extend-exclude = [
"doc",
"_typed_ops.pyi",
]
target-version = "py39"
Expand All @@ -256,6 +255,9 @@ target-version = "py39"
# E402: module level import not at top of file
# E501: line too long - let the formatter worry about that
# E731: do not assign a lambda expression, use a def
extend-safe-fixes = [
"TID252", # absolute imports
]
ignore = [
"E402",
"E501",
Expand All @@ -269,9 +271,6 @@ select = [
"I", # isort
"UP", # Pyupgrade
]
extend-safe-fixes = [
"TID252", # absolute imports
]

[tool.ruff.lint.per-file-ignores]
# don't enforce absolute imports
Expand All @@ -284,6 +283,13 @@ known-first-party = ["xarray"]
# Disallow all relative imports.
ban-relative-imports = "all"

[tool.ruff.lint.pydocstyle]
convention = "numpy"

[tool.ruff.format]
docstring-code-format = true
preview = false

[tool.pytest.ini_options]
addopts = ["--strict-config", "--strict-markers"]
filterwarnings = [
Expand Down
27 changes: 18 additions & 9 deletions xarray/backends/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -1132,7 +1132,8 @@ def to_netcdf(
*,
multifile: Literal[True],
invalid_netcdf: bool = False,
) -> tuple[ArrayWriter, AbstractDataStore]: ...
) -> tuple[ArrayWriter, AbstractDataStore]:
...


# path=None writes to bytes
Expand All @@ -1149,7 +1150,8 @@ def to_netcdf(
compute: bool = True,
multifile: Literal[False] = False,
invalid_netcdf: bool = False,
) -> bytes: ...
) -> bytes:
...


# compute=False returns dask.Delayed
Expand All @@ -1167,7 +1169,8 @@ def to_netcdf(
compute: Literal[False],
multifile: Literal[False] = False,
invalid_netcdf: bool = False,
) -> Delayed: ...
) -> Delayed:
...


# default return None
Expand All @@ -1184,7 +1187,8 @@ def to_netcdf(
compute: Literal[True] = True,
multifile: Literal[False] = False,
invalid_netcdf: bool = False,
) -> None: ...
) -> None:
...


# if compute cannot be evaluated at type check time
Expand All @@ -1202,7 +1206,8 @@ def to_netcdf(
compute: bool = False,
multifile: Literal[False] = False,
invalid_netcdf: bool = False,
) -> Delayed | None: ...
) -> Delayed | None:
...


# if multifile cannot be evaluated at type check time
Expand All @@ -1220,7 +1225,8 @@ def to_netcdf(
compute: bool = False,
multifile: bool = False,
invalid_netcdf: bool = False,
) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None: ...
) -> tuple[ArrayWriter, AbstractDataStore] | Delayed | None:
...


# Any
Expand All @@ -1237,7 +1243,8 @@ def to_netcdf(
compute: bool = False,
multifile: bool = False,
invalid_netcdf: bool = False,
) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None: ...
) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None:
...


def to_netcdf(
Expand Down Expand Up @@ -1671,7 +1678,8 @@ def to_zarr(
zarr_version: int | None = None,
write_empty_chunks: bool | None = None,
chunkmanager_store_kwargs: dict[str, Any] | None = None,
) -> backends.ZarrStore: ...
) -> backends.ZarrStore:
...


# compute=False returns dask.Delayed
Expand All @@ -1694,7 +1702,8 @@ def to_zarr(
zarr_version: int | None = None,
write_empty_chunks: bool | None = None,
chunkmanager_store_kwargs: dict[str, Any] | None = None,
) -> Delayed: ...
) -> Delayed:
...


def to_zarr(
Expand Down
7 changes: 2 additions & 5 deletions xarray/backends/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,13 +235,10 @@ def load(self):
For example::

class SuffixAppendingDataStore(AbstractDataStore):

def load(self):
variables, attributes = AbstractDataStore.load(self)
variables = {'%s_suffix' % k: v
for k, v in variables.items()}
attributes = {'%s_suffix' % k: v
for k, v in attributes.items()}
variables = {"%s_suffix" % k: v for k, v in variables.items()}
attributes = {"%s_suffix" % k: v for k, v in attributes.items()}
return variables, attributes

This function will be called anytime variables or attributes
Expand Down
6 changes: 3 additions & 3 deletions xarray/backends/locks.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,9 @@ class SerializableLock:
The creation of locks is itself not threadsafe.
"""

_locks: ClassVar[WeakValueDictionary[Hashable, threading.Lock]] = (
WeakValueDictionary()
)
_locks: ClassVar[
WeakValueDictionary[Hashable, threading.Lock]
] = WeakValueDictionary()
token: Hashable
lock: threading.Lock

Expand Down
4 changes: 2 additions & 2 deletions xarray/backends/plugins.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ def backends_dict_from_pkg(


def set_missing_parameters(
backend_entrypoints: dict[str, type[BackendEntrypoint]]
backend_entrypoints: dict[str, type[BackendEntrypoint]],
) -> None:
for _, backend in backend_entrypoints.items():
if backend.open_dataset_parameters is None:
Expand All @@ -91,7 +91,7 @@ def set_missing_parameters(


def sort_backends(
backend_entrypoints: dict[str, type[BackendEntrypoint]]
backend_entrypoints: dict[str, type[BackendEntrypoint]],
) -> dict[str, type[BackendEntrypoint]]:
ordered_backends_entrypoints = {}
for be_name in STANDARD_BACKENDS_ORDER:
Expand Down
Loading
Loading