
Commit

docs: fix word spellings (#4264)
Signed-off-by: Jinzhe Zeng <jinzhe.zeng@rutgers.edu>
Co-authored-by: Han Wang <92130845+wanghan-iapcm@users.noreply.github.com>
njzjz and wanghan-iapcm authored Oct 28, 2024
1 parent 04e1159 commit 8f546cf
Showing 196 changed files with 448 additions and 442 deletions.
2 changes: 1 addition & 1 deletion backend/read_env.py
@@ -43,7 +43,7 @@ def get_argument_from_env() -> tuple[str, list, list, dict, str, str]:
"""
cmake_args = []
extra_scripts = {}
- # get variant option from the environment varibles, available: cpu, cuda, rocm
+ # get variant option from the environment variables, available: cpu, cuda, rocm
dp_variant = os.environ.get("DP_VARIANT", "cpu").lower()
if dp_variant == "cpu" or dp_variant == "":
cmake_minimum_required_version = "3.16"
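As a side note, a minimal hypothetical sketch of the environment-variable pattern this hunk documents (illustrative only, not the actual build script):

    import os

    # Read the requested build variant; per the comment above, the valid values
    # are cpu, cuda, and rocm, with cpu as the default (an empty value also means cpu).
    dp_variant = os.environ.get("DP_VARIANT", "cpu").lower() or "cpu"
    if dp_variant not in ("cpu", "cuda", "rocm"):
        raise RuntimeError(f"unsupported DP_VARIANT: {dp_variant}")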
2 changes: 1 addition & 1 deletion deepmd/__init__.py
@@ -17,7 +17,7 @@


def DeepPotential(*args, **kwargs):
- """Factory function that forwards to DeepEval (for compatbility
+ """Factory function that forwards to DeepEval (for compatibility
and performance).
Parameters
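To illustrate the forwarding this docstring describes, a hedged sketch of such a factory; the DeepEval import path here is an assumption, not necessarily the one deepmd uses:

    def DeepPotential(*args, **kwargs):
        """Forward to DeepEval for backward compatibility."""
        from deepmd.infer.deep_eval import DeepEval  # assumed location of DeepEval
        return DeepEval(*args, **kwargs)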
2 changes: 1 addition & 1 deletion deepmd/backend/suffix.py
@@ -23,7 +23,7 @@ def format_model_suffix(
"""Check and format the suffixes of a filename.
When preferred_backend is not given, this method checks the suffix of the filename
- is within the suffixes of the any backends (with the given feature) and doesn't do formating.
+ is within the suffixes of the any backends (with the given feature) and doesn't do formatting.
When preferred_backend is given, strict_prefer must be given.
If strict_prefer is True and the suffix is not within the suffixes of the preferred backend,
or strict_prefer is False and the suffix is not within the suffixes of the any backend with the given feature,
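The branching described in this docstring can be summarized with a hedged sketch; the helper name, signature, and the final formatting step are illustrative assumptions, not the actual implementation:

    from pathlib import Path
    from typing import Optional

    def check_suffix_sketch(
        filename: str,
        allowed: set,                      # suffixes of any backend with the feature
        preferred: Optional[str] = None,   # suffix of the preferred backend
        strict_prefer: Optional[bool] = None,
    ) -> str:
        suffix = Path(filename).suffix
        if preferred is None:
            # no preferred backend: only check the suffix, do no formatting
            if suffix not in allowed:
                raise ValueError(f"unsupported model suffix: {suffix}")
            return filename
        assert strict_prefer is not None, "strict_prefer must be given with preferred_backend"
        acceptable = {preferred} if strict_prefer else allowed
        if suffix not in acceptable:
            filename += preferred  # assumed fallback: append the preferred suffix
        return filename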
2 changes: 1 addition & 1 deletion deepmd/calculator.py
@@ -32,7 +32,7 @@
class DP(Calculator):
"""Implementation of ASE deepmd calculator.
- Implemented propertie are `energy`, `forces` and `stress`
+ Implemented properties are `energy`, `forces` and `stress`
Parameters
----------
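A typical usage sketch for this calculator; the geometry and the model filename "graph.pb" are placeholders:

    from ase import Atoms
    from deepmd.calculator import DP

    # a water molecule in a periodic box (positions are illustrative)
    water = Atoms(
        "H2O",
        positions=[(0.7601, 1.9270, 1.0), (1.9575, 1.0, 1.0), (1.0, 1.0, 1.0)],
        cell=[10.0, 10.0, 10.0],
        pbc=True,
    )
    water.calc = DP(model="graph.pb")    # placeholder model file
    print(water.get_potential_energy())  # "energy"
    print(water.get_forces())            # "forces"
    print(water.get_stress())            # "stress"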
6 changes: 3 additions & 3 deletions deepmd/common.py
@@ -77,7 +77,7 @@ def select_idx_map(atom_types: np.ndarray, select_types: np.ndarray) -> np.ndarr
Parameters
----------
atom_types : np.ndarray
- array specifing type for each atoms as integer
+ array specifying type for each atoms as integer
select_types : np.ndarray
types of atoms you want to find indices for
@@ -126,7 +126,7 @@ def make_default_mesh(pbc: bool, mixed_type: bool) -> np.ndarray:
def j_deprecated(
jdata: dict[str, "_DICT_VAL"], key: str, deprecated_key: list[str] = []
) -> "_DICT_VAL":
- """Assert that supplied dictionary conaines specified key.
+ """Assert that supplied dictionary contains specified key.
Parameters
----------
@@ -218,7 +218,7 @@ def get_np_precision(precision: "_PRECISION") -> np.dtype:
Returns
-------
np.dtype
- numpy presicion constant
+ numpy precision constant
Raises
------
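To make the select_idx_map contract from the first hunk concrete, a small hedged NumPy sketch (the actual implementation may order the indices differently):

    import numpy as np

    atom_types = np.array([0, 1, 1, 0, 2])   # type of each atom
    select_types = np.array([1, 2])          # types to select
    idx_map = np.where(np.isin(atom_types, select_types))[0]
    print(idx_map)                           # [1 2 4]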
2 changes: 1 addition & 1 deletion deepmd/dpmodel/atomic_model/base_atomic_model.py
@@ -158,7 +158,7 @@ def forward_common_atomic(
Parameters
----------
extended_coord
- extended coodinates, shape: nf x (nall x 3)
+ extended coordinates, shape: nf x (nall x 3)
extended_atype
extended atom typs, shape: nf x nall
for a type < 0 indicating the atomic is virtual.
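The nf x (nall x 3) layout mentioned above, written out as a hedged example with made-up sizes:

    import numpy as np

    nf, nall = 2, 6                                   # frames, extended (local + ghost) atoms
    extended_coord = np.zeros((nf, nall * 3))         # coordinates flattened per frame
    xyz = extended_coord.reshape(nf, nall, 3)         # per-atom view of the same data
    extended_atype = np.zeros((nf, nall), dtype=int)  # nf x nall; a type < 0 would mark a virtual atom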
4 changes: 2 additions & 2 deletions deepmd/dpmodel/atomic_model/dp_atomic_model.py
@@ -100,7 +100,7 @@ def forward_atomic(
Parameters
----------
extended_coord
- coodinates in extended region
+ coordinates in extended region
extended_atype
atomic type in extended region
nlist
@@ -169,7 +169,7 @@ def serialize(self) -> dict:
)
return dd

- # for subclass overriden
+ # for subclass overridden
base_descriptor_cls = BaseDescriptor
"""The base descriptor class."""
base_fitting_cls = BaseFitting
4 changes: 2 additions & 2 deletions deepmd/dpmodel/atomic_model/linear_atomic_model.py
@@ -162,7 +162,7 @@ def forward_atomic(
Parameters
----------
extended_coord
- coodinates in extended region, (nframes, nall * 3)
+ coordinates in extended region, (nframes, nall * 3)
extended_atype
atomic type in extended region, (nframes, nall)
nlist
@@ -341,7 +341,7 @@ class DPZBLLinearEnergyAtomicModel(LinearEnergyAtomicModel):
Mapping atom type to the name (str) of the type.
For example `type_map[1]` gives the name of the type 1.
smin_alpha
- The short-range tabulated interaction will be swithed according to the distance of the nearest neighbor.
+ The short-range tabulated interaction will be switched according to the distance of the nearest neighbor.
This distance is calculated by softmin.
"""

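For intuition about smin_alpha, a hedged sketch of a softmin-style nearest-neighbor distance; the exact DeePMD-kit expression may differ:

    import numpy as np

    def softmin_distance(r, alpha):
        """Smooth approximation of min(r); smaller alpha weights the nearest neighbor more."""
        w = np.exp(-r / alpha)
        return float(np.sum(r * w) / np.sum(w))

    print(softmin_distance(np.array([2.0, 2.5, 3.0]), alpha=0.1))  # close to 2.0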
4 changes: 2 additions & 2 deletions deepmd/dpmodel/atomic_model/make_base_atomic_model.py
@@ -152,7 +152,7 @@ def make_atom_mask(
self,
atype: t_tensor,
) -> t_tensor:
- """The atoms with type < 0 are treated as virutal atoms,
+ """The atoms with type < 0 are treated as virtual atoms,
which serves as place-holders for multi-frame calculations
with different number of atoms in different frames.
@@ -164,7 +164,7 @@ def make_atom_mask(
Returns
-------
mask
- True for real atoms and False for virutal atoms.
+ True for real atoms and False for virtual atoms.
"""
# supposed to be supported by all backends
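What the mask amounts to, as a hedged NumPy illustration of the rule stated in the docstring:

    import numpy as np

    atype = np.array([[0, 2, -1, -1],
                      [1, 1, 1, -1]])  # two frames padded to the same atom count
    mask = atype >= 0                  # True for real atoms, False for virtual atoms
    print(mask)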
2 changes: 1 addition & 1 deletion deepmd/dpmodel/common.py
@@ -30,7 +30,7 @@
"int64": np.int64,
"bool": bool,
"default": GLOBAL_NP_FLOAT_PRECISION,
- # NumPy doesn't have bfloat16 (and does't plan to add)
+ # NumPy doesn't have bfloat16 (and doesn't plan to add)
# ml_dtypes is a solution, but it seems not supporting np.save/np.load
# hdf5 hasn't supported bfloat16 as well (see https://forum.hdfgroup.org/t/11975)
"bfloat16": ml_dtypes.bfloat16,
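A small hedged example of the ml_dtypes workaround the comment refers to:

    import numpy as np
    import ml_dtypes

    x = np.array([1.0, 2.0, 3.0], dtype=ml_dtypes.bfloat16)
    print(x.dtype)  # bfloat16
    # as noted above, np.save/np.load support for this dtype is limited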
2 changes: 1 addition & 1 deletion deepmd/dpmodel/descriptor/descriptor.py
@@ -110,7 +110,7 @@ def share_params(self, base_class, shared_level, resume=False):
"""
Share the parameters of self to the base_class with shared_level during multitask training.
If not start from checkpoint (resume is False),
- some seperated parameters (e.g. mean and stddev) will be re-calculated across different classes.
+ some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
raise NotImplementedError

14 changes: 7 additions & 7 deletions deepmd/dpmodel/descriptor/dpa1.py
@@ -358,11 +358,11 @@ def get_dim_emb(self) -> int:
return self.se_atten.dim_emb

def mixed_types(self) -> bool:
- """If true, the discriptor
+ """If true, the descriptor
1. assumes total number of atoms aligned across frames;
2. requires a neighbor list that does not distinguish different atomic types.
- If false, the discriptor
+ If false, the descriptor
1. assumes total number of atoms of each atom type aligned across frames;
2. requires a neighbor list that distinguishes different atomic types.
@@ -385,7 +385,7 @@ def share_params(self, base_class, shared_level, resume=False):
"""
Share the parameters of self to the base_class with shared_level during multitask training.
If not start from checkpoint (resume is False),
- some seperated parameters (e.g. mean and stddev) will be re-calculated across different classes.
+ some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
raise NotImplementedError

@@ -459,7 +459,7 @@ def call(
nlist
The neighbor list. shape: nf x nloc x nnei
mapping
- The index mapping from extended to lcoal region. not used by this descriptor.
+ The index mapping from extended to local region. not used by this descriptor.
Returns
-------
@@ -602,7 +602,7 @@ def update_sel(
Parameters
----------
train_data : DeepmdDataSystem
- data used to do neighbor statictics
+ data used to do neighbor statistics
type_map : list[str], optional
The name of each type of atoms
local_jdata : dict
@@ -793,11 +793,11 @@ def __getitem__(self, key):
raise KeyError(key)

def mixed_types(self) -> bool:
- """If true, the discriptor
+ """If true, the descriptor
1. assumes total number of atoms aligned across frames;
2. requires a neighbor list that does not distinguish different atomic types.
- If false, the discriptor
+ If false, the descriptor
1. assumes total number of atoms of each atom type aligned across frames;
2. requires a neighbor list that distinguishes different atomic types.
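One way to picture the two conventions mixed_types() distinguishes is through the neighbor selection (sel) setting; the numbers below are illustrative assumptions, not defaults:

    # type-distinguishing neighbor list: one neighbor budget per atom type
    sel_per_type = [46, 92]  # e.g. up to 46 neighbors of type 0 and 92 of type 1

    # mixed-type neighbor list: a single budget shared by all neighbor types
    sel_mixed = [138]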
8 changes: 4 additions & 4 deletions deepmd/dpmodel/descriptor/dpa2.py
@@ -624,11 +624,11 @@ def get_dim_emb(self) -> int:
return self.repformers.dim_emb

def mixed_types(self) -> bool:
- """If true, the discriptor
+ """If true, the descriptor
1. assumes total number of atoms aligned across frames;
2. requires a neighbor list that does not distinguish different atomic types.
- If false, the discriptor
+ If false, the descriptor
1. assumes total number of atoms of each atom type aligned across frames;
2. requires a neighbor list that distinguishes different atomic types.
@@ -653,7 +653,7 @@ def share_params(self, base_class, shared_level, resume=False):
"""
Share the parameters of self to the base_class with shared_level during multitask training.
If not start from checkpoint (resume is False),
- some seperated parameters (e.g. mean and stddev) will be re-calculated across different classes.
+ some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
raise NotImplementedError

@@ -1021,7 +1021,7 @@ def update_sel(
Parameters
----------
train_data : DeepmdDataSystem
- data used to do neighbor statictics
+ data used to do neighbor statistics
type_map : list[str], optional
The name of each type of atoms
local_jdata : dict
6 changes: 3 additions & 3 deletions deepmd/dpmodel/descriptor/hybrid.py
@@ -63,7 +63,7 @@ def __init__(
for ii in range(1, self.numb_descrpt):
assert (
self.descrpt_list[ii].get_ntypes() == self.descrpt_list[0].get_ntypes()
- ), f"number of atom types in {ii}th descrptor {self.descrpt_list[0].__class__.__name__} does not match others"
+ ), f"number of atom types in {ii}th descriptor {self.descrpt_list[0].__class__.__name__} does not match others"
# if hybrid sel is larger than sub sel, the nlist needs to be cut for each type
hybrid_sel = self.get_sel()
self.nlist_cut_idx: list[np.ndarray] = []
@@ -161,7 +161,7 @@ def share_params(self, base_class, shared_level, resume=False):
"""
Share the parameters of self to the base_class with shared_level during multitask training.
If not start from checkpoint (resume is False),
- some seperated parameters (e.g. mean and stddev) will be re-calculated across different classes.
+ some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
raise NotImplementedError

@@ -284,7 +284,7 @@ def update_sel(
Parameters
----------
train_data : DeepmdDataSystem
- data used to do neighbor statictics
+ data used to do neighbor statistics
type_map : list[str], optional
The name of each type of atoms
local_jdata : dict
4 changes: 2 additions & 2 deletions deepmd/dpmodel/descriptor/make_base_descriptor.py
@@ -116,7 +116,7 @@ def share_params(self, base_class, shared_level, resume=False):
"""
Share the parameters of self to the base_class with shared_level during multitask training.
If not start from checkpoint (resume is False),
- some seperated parameters (e.g. mean and stddev) will be re-calculated across different classes.
+ some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
pass

@@ -194,7 +194,7 @@ def update_sel(
Parameters
----------
train_data : DeepmdDataSystem
- data used to do neighbor statictics
+ data used to do neighbor statistics
type_map : list[str], optional
The name of each type of atoms
local_jdata : dict
8 changes: 4 additions & 4 deletions deepmd/dpmodel/descriptor/repformers.py
@@ -307,11 +307,11 @@ def __getitem__(self, key):
raise KeyError(key)

def mixed_types(self) -> bool:
- """If true, the discriptor
+ """If true, the descriptor
1. assumes total number of atoms aligned across frames;
2. requires a neighbor list that does not distinguish different atomic types.
- If false, the discriptor
+ If false, the descriptor
1. assumes total number of atoms of each atom type aligned across frames;
2. requires a neighbor list that distinguishes different atomic types.
@@ -1480,7 +1480,7 @@ def call(
"""
Parameters
----------
- g1_ext : nf x nall x ng1 extended single-atom chanel
+ g1_ext : nf x nall x ng1 extended single-atom channel
g2 : nf x nloc x nnei x ng2 pair-atom channel, invariant
h2 : nf x nloc x nnei x 3 pair-atom channel, equivariant
nlist : nf x nloc x nnei neighbor list (padded neis are set to 0)
@@ -1489,7 +1489,7 @@
Returns
-------
- g1: nf x nloc x ng1 updated single-atom chanel
+ g1: nf x nloc x ng1 updated single-atom channel
g2: nf x nloc x nnei x ng2 updated pair-atom channel, invariant
h2: nf x nloc x nnei x 3 updated pair-atom channel, equivariant
"""
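The tensor shapes in this call() docstring, spelled out as a hedged example with arbitrary sizes:

    import numpy as np

    nf, nloc, nall, nnei, ng1, ng2 = 1, 8, 20, 10, 32, 16
    g1_ext = np.zeros((nf, nall, ng1))             # extended single-atom channel
    g2 = np.zeros((nf, nloc, nnei, ng2))           # pair-atom channel, invariant
    h2 = np.zeros((nf, nloc, nnei, 3))             # pair-atom channel, equivariant
    nlist = np.zeros((nf, nloc, nnei), dtype=int)  # padded neighbors set to 0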
8 changes: 4 additions & 4 deletions deepmd/dpmodel/descriptor/se_e2_a.py
@@ -281,7 +281,7 @@ def share_params(self, base_class, shared_level, resume=False):
"""
Share the parameters of self to the base_class with shared_level during multitask training.
If not start from checkpoint (resume is False),
- some seperated parameters (e.g. mean and stddev) will be re-calculated across different classes.
+ some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
raise NotImplementedError

@@ -359,7 +359,7 @@ def call(
nlist
The neighbor list. shape: nf x nloc x nnei
mapping
- The index mapping from extended to lcoal region. not used by this descriptor.
+ The index mapping from extended to local region. not used by this descriptor.
Returns
-------
@@ -486,7 +486,7 @@ def update_sel(
Parameters
----------
train_data : DeepmdDataSystem
- data used to do neighbor statictics
+ data used to do neighbor statistics
type_map : list[str], optional
The name of each type of atoms
local_jdata : dict
@@ -525,7 +525,7 @@ def call(
nlist
The neighbor list. shape: nf x nloc x nnei
mapping
- The index mapping from extended to lcoal region. not used by this descriptor.
+ The index mapping from extended to local region. not used by this descriptor.
Returns
-------
8 changes: 4 additions & 4 deletions deepmd/dpmodel/descriptor/se_r.py
@@ -48,7 +48,7 @@
@BaseDescriptor.register("se_e2_r")
@BaseDescriptor.register("se_r")
class DescrptSeR(NativeOP, BaseDescriptor):
- r"""DeepPot-SE_R constructed from only the radial imformation of atomic configurations.
+ r"""DeepPot-SE_R constructed from only the radial information of atomic configurations.
Parameters
@@ -237,7 +237,7 @@ def share_params(self, base_class, shared_level, resume=False):
"""
Share the parameters of self to the base_class with shared_level during multitask training.
If not start from checkpoint (resume is False),
- some seperated parameters (e.g. mean and stddev) will be re-calculated across different classes.
+ some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
raise NotImplementedError

@@ -308,7 +308,7 @@ def call(
nlist
The neighbor list. shape: nf x nloc x nnei
mapping
- The index mapping from extended to lcoal region. not used by this descriptor.
+ The index mapping from extended to local region. not used by this descriptor.
Returns
-------
@@ -414,7 +414,7 @@ def update_sel(
Parameters
----------
train_data : DeepmdDataSystem
- data used to do neighbor statictics
+ data used to do neighbor statistics
type_map : list[str], optional
The name of each type of atoms
local_jdata : dict
(The remaining changed files are not shown in this view.)