Merge pull request #580 from materialsproject/future-type-annos
Future type annotations
janosh authored Oct 19, 2023
2 parents 109d9ef + fde68d9 commit a77db3c
Showing 11 changed files with 82 additions and 76 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -2,7 +2,7 @@ default_language_version:
python: python3
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.1.0
rev: v0.1.1
hooks:
- id: ruff
args: [--fix]
65 changes: 34 additions & 31 deletions pyproject.toml
@@ -142,36 +142,37 @@ exclude_lines = [
]

[tool.ruff]
target-version = "py38"
target-version = "py39"
select = [
"B", # flake8-bugbear
"C4", # flake8-comprehensions
"D", # pydocstyle
"E", # pycodestyle error
"EXE", # flake8-executable
"F", # pyflakes
"FLY", # flynt
"I", # isort
"ICN", # flake8-import-conventions
"ISC", # flake8-implicit-str-concat
"PD", # pandas-vet
"PERF", # perflint
"PGH", # pygrep-hooks
"PIE", # flake8-pie
"PL", # pylint
"PT", # flake8-pytest-style
"PYI", # flakes8-pyi
"Q", # flake8-quotes
"RET", # flake8-return
"RSE", # flake8-raise
"RUF", # Ruff-specific rules
"SIM", # flake8-simplify
"SLOT", # flake8-slots
"TCH", # flake8-type-checking
"TID", # flake8-tidy-imports
"UP", # pyupgrade
"W", # pycodestyle warning
"YTT", # flake8-2020
"B", # flake8-bugbear
"C4", # flake8-comprehensions
"D", # pydocstyle
"E", # pycodestyle error
"EXE", # flake8-executable
"F", # pyflakes
"FA", # flake8-future-annotations
"FBT003", # boolean-positional-value-in-call
"FLY", # flynt
"I", # isort
"ICN", # flake8-import-conventions
"ISC", # flake8-implicit-str-concat
"PD", # pandas-vet
"PERF", # perflint
"PIE", # flake8-pie
"PL", # pylint
"PT", # flake8-pytest-style
"PYI", # flakes8-pyi
"Q", # flake8-quotes
"RET", # flake8-return
"RSE", # flake8-raise
"RUF", # Ruff-specific rules
"SIM", # flake8-simplify
"SLOT", # flake8-slots
"TCH", # flake8-type-checking
"TID", # flake8-tidy-imports
"UP", # pyupgrade
"W", # pycodestyle warning
"YTT", # flake8-2020
]
ignore = [
"PD011", # pandas-use-of-dot-values
@@ -192,5 +193,7 @@ isort.known-first-party = ["atomate2"]
"**/tests/*" = ["D"]
# flake8-type-checking (TCH): things inside TYPE_CHECKING aren't available
# at runtime and so can't be used by pydantic models
# flake8-future-annotations (FA): future annotations only work in pydantic models in python 3.10+
"**/schemas/*" = ["FA", "TCH"]
# flake8-future-annotations (FA): pipe operator for type unions only work in pydantic models in python 3.10+
"**/schemas/*" = ["FA", "TCH", "UP007"]
"**/schemas.py" = ["FA", "TCH", "UP007"]
"**/settings.py" = ["FA", "TCH", "UP007"]
2 changes: 1 addition & 1 deletion src/atomate2/amset/schemas.py
@@ -205,7 +205,7 @@ def from_directory(
transport=transport,
usage_stats=timing,
kpoint_mesh=inter_mesh,
nkpoints=np.product(inter_mesh),
nkpoints=np.prod(inter_mesh),
log=log,
**mesh_kwargs,
)
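
The `np.product` to `np.prod` swap is future-proofing: `np.product` was a long-standing alias that NumPy deprecated in 1.25 and removed in 2.0. A tiny sketch with a made-up mesh:

import numpy as np

inter_mesh = (15, 15, 15)  # hypothetical interpolated k-point mesh
nkpoints = np.prod(inter_mesh)  # 3375; same result np.product used to give
print(nkpoints)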
2 changes: 2 additions & 0 deletions src/atomate2/cp2k/schemas/calculation.py
@@ -83,6 +83,7 @@ class CalculationInput(BaseModel):
)

@field_validator("atomic_kind_info", mode="before")
@classmethod
def remove_unnecessary(cls, atomic_kind_info) -> dict:
"""Remove unnecessary entry from atomic_kind_info."""
for k in atomic_kind_info:
@@ -91,6 +92,7 @@ def remove_unnecessary(cls, atomic_kind_info) -> dict:
return atomic_kind_info

@field_validator("dft", mode="before")
@classmethod
def cleanup_dft(cls, dft) -> dict:
"""Convert UKS strings to UKS=True."""
if any(v.upper() == "UKS" for v in dft.values()):
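
Stacking `@classmethod` under `@field_validator` matches pydantic v2's documented pattern: validators receive the class as their first argument, and marking them explicitly keeps linters and type checkers happy. A simplified sketch of the idea (the model and the exact UKS handling are illustrative, not a copy of the real validator):

from pydantic import BaseModel, field_validator


class DftSection(BaseModel):  # hypothetical stand-in for CalculationInput
    dft: dict

    @field_validator("dft", mode="before")
    @classmethod
    def cleanup_dft(cls, dft: dict) -> dict:
        # Convert a bare "UKS" string value into UKS=True before validation.
        if any(isinstance(v, str) and v.upper() == "UKS" for v in dft.values()):
            dft["UKS"] = True
        return dft


print(DftSection(dft={"UKS": "UKS"}).dft)  # {'UKS': True}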
8 changes: 4 additions & 4 deletions src/atomate2/forcefields/jobs.py
@@ -122,10 +122,10 @@ def make(self, structure: Structure) -> ForceFieldTaskDocument:
return ForceFieldTaskDocument.from_ase_compatible_result(
self.force_field_name,
result,
False,
1,
None,
None,
relax_cell=False,
steps=1,
relax_kwargs=None,
optimizer_kwargs=None,
**self.task_document_kwargs,
)

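
Spelling the arguments out as keywords instead of the bare `False, 1, None, None` is what the newly enabled FBT003 rule pushes toward: positional booleans and magic numbers are hard to read at the call site. A sketch of the pattern with a hypothetical helper:

from __future__ import annotations


def run_relaxation(structure: str, *, relax_cell: bool = False, steps: int = 1,
                   relax_kwargs: dict | None = None) -> str:
    # Keyword-only parameters force self-documenting call sites.
    mode = "cell + ions" if relax_cell else "ions only"
    return f"relaxing {structure} ({mode}) for {steps} step(s)"


# With plain positional parameters, run_relaxation("Si", False, 1) would be
# legal but opaque; the keyword-only signature above rules it out entirely.
print(run_relaxation("Si", relax_cell=False, steps=1))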
13 changes: 7 additions & 6 deletions src/atomate2/settings.py
@@ -1,4 +1,5 @@
"""Settings for atomate2."""
from __future__ import annotations

import warnings
from pathlib import Path
@@ -74,12 +75,12 @@ class Atomate2Settings(BaseSettings):
None, description="Store data from these files in database if present"
)
VASP_STORE_ADDITIONAL_JSON: bool = Field(
True,
default=True,
description="Ingest any additional JSON data present into database when "
"parsing VASP directories useful for storing duplicate of FW.json",
)
VASP_RUN_BADER: bool = Field(
False,
default=False,
description="Whether to run the Bader program when parsing VASP calculations."
"Requires the bader executable to be on the path.",
)
@@ -91,7 +92,7 @@
"to the simulation will be compressed. If False no file is compressed.",
)
VASP_INHERIT_INCAR: bool = Field(
True,
default=True,
description="Whether to inherit INCAR settings from previous calculation. "
"This might be useful to port Custodian fixes to child jobs but can also be "
"dangerous e.g. when switching from GGA to meta-GGA or relax to static jobs."
@@ -118,7 +119,7 @@
"cp2k.psmp", description="Command to run the MPI version of cp2k"
)
CP2K_RUN_BADER: bool = Field(
False,
default=False,
description="Whether to run the Bader program when parsing CP2K calculations."
"Requires the bader executable to be on the path.",
)
@@ -149,13 +150,13 @@ class Atomate2Settings(BaseSettings):
None, description="Store data from these files in database if present"
)
CP2K_STORE_ADDITIONAL_JSON: bool = Field(
True,
default=True,
description="Ingest any additional JSON data present into database when "
"parsing CP2K directories useful for storing duplicate of FW.json",
)

CP2K_ZIP_FILES: Union[bool, Literal["atomate"]] = Field(
True,
default=True,
description="Determine if the files in folder are being compressed. If True "
"all the files are compressed. If 'atomate' only a selection of files related "
"to the simulation will be compressed. If False no file is compressed.",
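
Writing `Field(default=True, ...)` instead of passing the default positionally does not change behaviour; it just makes the default explicit next to its description. A minimal sketch with hypothetical settings fields, assuming the pydantic v2 + pydantic-settings setup the module already uses:

from pydantic import Field
from pydantic_settings import BaseSettings


class ExampleSettings(BaseSettings):  # hypothetical, mirroring the style above
    RUN_BADER: bool = Field(
        default=False,
        description="Whether to run the Bader program when parsing calculations.",
    )
    STORE_ADDITIONAL_JSON: bool = Field(
        default=True,
        description="Ingest any additional JSON data present into the database.",
    )


settings = ExampleSettings()
print(settings.RUN_BADER, settings.STORE_ADDITIONAL_JSON)  # False True unless overridden by env vars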
2 changes: 1 addition & 1 deletion src/atomate2/vasp/sets/base.py
@@ -1116,7 +1116,7 @@ def _set_kspacing(
# known before calling VASP, but a warning is raised when the KSPACING value is
# > 0.5 (2 reciprocal Angstrom). An error handler in Custodian is available to
# correct overly large KSPACING values (small number of kpoints) if necessary.
if np.product(kpoints.kpts) < 4 and incar.get("ISMEAR", 0) == -5:
if np.prod(kpoints.kpts) < 4 and incar.get("ISMEAR", 0) == -5:
incar["ISMEAR"] = 0

elif "KSPACING" in user_incar_settings:
16 changes: 10 additions & 6 deletions tests/cp2k/conftest.py
@@ -1,11 +1,15 @@
from __future__ import annotations

import logging
from collections.abc import Sequence
from hashlib import md5
from pathlib import Path
from typing import Literal, Union
from typing import TYPE_CHECKING, Literal

import pytest

if TYPE_CHECKING:
from collections.abc import Sequence

logger = logging.getLogger("atomate2")

_VFILES = "cp2k.inp"
@@ -127,7 +131,7 @@ def _run(ref_paths, fake_run_cp2k_kwargs=None):


def fake_run_cp2k(
ref_path: Union[str, Path],
ref_path: str | Path,
input_settings: Sequence[str] = (),
check_inputs: Sequence[Literal["cp2k.inp"]] = _VFILES,
clear_inputs: bool = True,
@@ -171,8 +175,8 @@ def _check_input(ref_path, user_input):
from pymatgen.io.cp2k.inputs import Cp2kInput

ref = Cp2kInput.from_file(ref_path / "inputs" / "cp2k.inp")
user_input.verbosity(False)
ref.verbosity(False)
user_input.verbosity(verbosity=False)
ref.verbosity(verbosity=False)
user_string = " ".join(user_input.get_string().lower().split())
user_hash = md5(user_string.encode("utf-8")).hexdigest()

@@ -195,7 +199,7 @@ def clear_cp2k_inputs():
logger.info("Cleared cp2k inputs")


def copy_cp2k_outputs(ref_path: Union[str, Path]):
def copy_cp2k_outputs(ref_path: str | Path):
import shutil

output_path = ref_path / "outputs"
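
The conftest changes follow one recipe: add the future import, move typing-only imports under `TYPE_CHECKING`, and write `str | Path` instead of `Union[str, Path]`. Because annotations are no longer evaluated at runtime, the guarded imports cost nothing when the tests actually run. A self-contained sketch of the recipe, with made-up names:

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers (mypy, pyright); never imported at runtime.
    from collections.abc import Sequence


def fake_run(ref_path: str | Path, input_settings: Sequence[str] = ()) -> Path:
    # The annotations stay strings, so Sequence need not exist at runtime.
    for name in input_settings:
        print(f"checking {name} against {ref_path}")
    return Path(ref_path)


fake_run("ref_dir", input_settings=("cp2k.inp",))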
10 changes: 7 additions & 3 deletions tests/vasp/conftest.py
@@ -1,15 +1,19 @@
from __future__ import annotations

import logging
from collections.abc import Generator, Sequence
from pathlib import Path
from typing import Any, Callable, Final, Literal, Union
from typing import TYPE_CHECKING, Any, Callable, Final, Literal

import pytest
from pytest import MonkeyPatch

if TYPE_CHECKING:
from collections.abc import Generator, Sequence

logger = logging.getLogger("atomate2")

_VFILES: Final = ("incar", "kpoints", "potcar", "poscar")
_REF_PATHS: dict[str, Union[str, Path]] = {}
_REF_PATHS: dict[str, str | Path] = {}
_FAKE_RUN_VASP_KWARGS: dict[str, dict] = {}


24 changes: 6 additions & 18 deletions tests/vasp/flows/test_phonons.py
@@ -77,9 +77,7 @@ def test_phonon_wf_only_displacements3(mock_vasp, clean_dir):
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.temperatures, [0, 100, 200, 300, 400]
)
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.has_imaginary_modes, False
)
assert responses[job.jobs[-1].uuid][1].output.has_imaginary_modes is False
assert responses[job.jobs[-1].uuid][1].output.force_constants is None
assert isinstance(responses[job.jobs[-1].uuid][1].output.jobdirs, PhononJobDirs)
assert isinstance(responses[job.jobs[-1].uuid][1].output.uuids, PhononUUIDs)
@@ -245,9 +243,7 @@ def test_phonon_wf_only_displacements_no_structural_transformation(
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.temperatures, [0, 100, 200, 300, 400]
)
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.has_imaginary_modes, False
)
assert responses[job.jobs[-1].uuid][1].output.has_imaginary_modes is False
assert responses[job.jobs[-1].uuid][1].output.force_constants is None
assert isinstance(responses[job.jobs[-1].uuid][1].output.jobdirs, PhononJobDirs)
assert isinstance(responses[job.jobs[-1].uuid][1].output.uuids, PhononUUIDs)
@@ -379,9 +375,7 @@ def test_phonon_wf_only_displacements_kpath(mock_vasp, clean_dir, kpathscheme):
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.temperatures, [0, 100, 200, 300, 400]
)
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.has_imaginary_modes, False
)
assert responses[job.jobs[-1].uuid][1].output.has_imaginary_modes is False
assert np.isclose(
responses[job.jobs[-1].uuid][1].output.force_constants.force_constants[0][0][0][
0
@@ -539,9 +533,7 @@ def test_phonon_wf_only_displacements_add_inputs(mock_vasp, clean_dir):
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.temperatures, [0, 100, 200, 300, 400]
)
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.has_imaginary_modes, False
)
assert responses[job.jobs[-1].uuid][1].output.has_imaginary_modes is False
assert np.isclose(
responses[job.jobs[-1].uuid][1].output.force_constants.force_constants[0][0][0][
0
@@ -672,9 +664,7 @@ def test_phonon_wf_only_displacements_optional_settings(mock_vasp, clean_dir):
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.temperatures, [0, 100, 200, 300, 400]
)
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.has_imaginary_modes, False
)
assert responses[job.jobs[-1].uuid][1].output.has_imaginary_modes is False
assert responses[job.jobs[-1].uuid][1].output.force_constants is None
assert isinstance(responses[job.jobs[-1].uuid][1].output.jobdirs, PhononJobDirs)
assert isinstance(responses[job.jobs[-1].uuid][1].output.uuids, PhononUUIDs)
@@ -772,9 +762,7 @@ def test_phonon_wf_all_steps(mock_vasp, clean_dir):
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.temperatures, [0, 100, 200, 300, 400]
)
assert np.allclose(
responses[job.jobs[-1].uuid][1].output.has_imaginary_modes, False
)
assert responses[job.jobs[-1].uuid][1].output.has_imaginary_modes is False
assert np.isclose(
responses[job.jobs[-1].uuid][1].output.force_constants.force_constants[0][0][0][
0
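
Replacing `np.allclose(..., False)` with an identity check tightens the phonon tests: `allclose` compares numerically, so it cannot tell `False` apart from `0` or `0.0`, while `is False` only passes for the actual boolean. A short illustration:

import numpy as np

has_imaginary_modes = False  # hypothetical workflow output

# np.allclose coerces to numbers, so both of these pass -- the old check was weak.
assert np.allclose(has_imaginary_modes, False)
assert np.allclose(0.0, False)

# The identity check accepts only the boolean itself, which is what the tests now assert.
assert has_imaginary_modes is False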
14 changes: 9 additions & 5 deletions tests/vasp/lobster/conftest.py
@@ -1,10 +1,14 @@
from __future__ import annotations

import logging
from collections.abc import Sequence
from pathlib import Path
from typing import Literal, Union
from typing import TYPE_CHECKING, Literal

import pytest

if TYPE_CHECKING:
from collections.abc import Sequence

logger = logging.getLogger("atomate2")

_LFILES = "lobsterin"
@@ -82,7 +86,7 @@ def _run(ref_paths, fake_run_lobster_kwargs):


def fake_run_lobster(
ref_path: Union[str, Path],
ref_path: str | Path,
check_lobster_inputs: Sequence[Literal["lobsterin"]] = _LFILES,
check_dft_inputs: Sequence[Literal["WAVECAR", "POSCAR"]] = _DFT_FILES,
lobsterin_settings: Sequence[str] = (),
@@ -118,7 +122,7 @@ def fake_run_lobster(
logger.info("ran fake LOBSTER, generated outputs")


def verify_inputs(ref_path: Union[str, Path], lobsterin_settings: Sequence[str]):
def verify_inputs(ref_path: str | Path, lobsterin_settings: Sequence[str]):
from pymatgen.io.lobster import Lobsterin

user = Lobsterin.from_file("lobsterin")
@@ -131,7 +135,7 @@ def verify_inputs(ref_path: Union[str, Path], lobsterin_settings: Sequence[str])
raise ValueError(f"lobsterin value of {p} is inconsistent!")


def copy_lobster_outputs(ref_path: Union[str, Path]):
def copy_lobster_outputs(ref_path: str | Path):
import shutil

output_path = ref_path / "outputs"
