fix(2024Q1): fix lammps nlist sort with large sel (#3994)
Copy from #3993.
iProzd authored Aug 1, 2024
2 parents c09a1f7 + 0e2e0f6 · commit 6d6fbb3
Showing 32 changed files with 175 additions and 16 deletions.
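
The thrust of the change: `format_nlist` used to sort and trim the neighbor list only when the incoming list was wider than `sum(self.get_sel())`, which is presumably how a large `sel` let an unsorted LAMMPS-supplied list reach the descriptor. The commit adds a capability query, `need_sorted_nlist_for_lower()`, and threads its answer into `format_nlist` as `extra_nlist_sort`, forcing the distance sort regardless of width. A minimal sketch of the delegation chain, with stand-in classes rather than the actual deepmd-kit ones:

# Hedged sketch, not deepmd-kit code: the model asks its atomic model, which
# asks its descriptor, whether forward_lower needs a distance-sorted nlist.
class SketchDescriptor:
    def need_sorted_nlist_for_lower(self) -> bool:
        return True  # a DPA-2-like answer; se_e2_a/se_r answer False


class SketchAtomicModel:
    def __init__(self, descriptor: SketchDescriptor) -> None:
        self.descriptor = descriptor

    def need_sorted_nlist_for_lower(self) -> bool:
        return self.descriptor.need_sorted_nlist_for_lower()


class SketchModel:
    def __init__(self, atomic_model: SketchAtomicModel) -> None:
        self.atomic_model = atomic_model

    def need_sorted_nlist_for_lower(self) -> bool:
        return self.atomic_model.need_sorted_nlist_for_lower()


model = SketchModel(SketchAtomicModel(SketchDescriptor()))
assert model.need_sorted_nlist_for_lower()  # True -> format_nlist will force-sort
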
2 changes: 2 additions & 0 deletions .github/workflows/build_wheel.yml
@@ -151,6 +151,8 @@ jobs:
          - variant: "_cu11"
            cuda_version: "11"
    steps:
+     - name: Delete huge unnecessary tools folder
+       run: rm -rf /opt/hostedtoolcache
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
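
The added step reclaims runner disk space before the CUDA wheel build. Unrelated to deepmd-kit itself, but if you want to gauge locally how much headroom such a cleanup buys, a quick probe of the filesystem holding the toolcache (the path is the hosted runner's and likely absent on your machine):

import os
import shutil

path = "/opt/hostedtoolcache"  # GitHub-hosted runner path; rarely exists locally
if os.path.isdir(path):
    total, used, free = shutil.disk_usage(path)
    print(f"{free / 2**30:.1f} GiB free on that filesystem")
else:
    print("no hosted toolcache here")
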
2 changes: 1 addition & 1 deletion .github/workflows/test_cuda.yml
@@ -47,7 +47,7 @@ jobs:
          && sudo apt-get -y install cuda-12-3 libcudnn8=8.9.5.*-1+cuda12.3
        if: false # skip as we use nvidia image
      - run: python -m pip install -U "pip>=21.3.1,!=23.0.0"
-     - run: python -m pip install "tensorflow>=2.15.0rc0" "torch>=2.2.0"
+     - run: python -m pip install "tensorflow>=2.15.0rc0" "torch==2.3.1.*"
      - run: python -m pip install -v -e .[gpu,test,lmp,cu12,torch] mpi4py "ase @ https://gitlab.com/ase/ase/-/archive/8c5aa5fd6448c5cfb517a014dccf2b214a9dfa8f/ase-8c5aa5fd6448c5cfb517a014dccf2b214a9dfa8f.tar.gz"
        env:
          DP_VARIANT: cuda
4 changes: 4 additions & 0 deletions deepmd/dpmodel/atomic_model/dp_atomic_model.py
@@ -104,6 +104,10 @@ def get_out_bias(self) -> np.ndarray:
        """Return the output bias of the atomic model."""
        return self.fitting["bias_atom_e"]

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the atomic model needs sorted nlist when using `forward_lower`."""
+       return self.descriptor.need_sorted_nlist_for_lower()
+
    def forward_atomic(
        self,
        extended_coord: np.ndarray,
4 changes: 4 additions & 0 deletions deepmd/dpmodel/atomic_model/linear_atomic_model.py
@@ -111,6 +111,10 @@ def _sort_rcuts_sels(self) -> Tuple[List[float], List[int]]:
        )
        return [p[0] for p in zipped], [p[1] for p in zipped]

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the atomic model needs sorted nlist when using `forward_lower`."""
+       return True
+
    def forward_atomic(
        self,
        extended_coord,
4 changes: 4 additions & 0 deletions deepmd/dpmodel/atomic_model/make_base_atomic_model.py
@@ -134,6 +134,10 @@ def mixed_types(self) -> bool:
        """
        pass

+   @abstractmethod
+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the atomic model needs sorted nlist when using `forward_lower`."""
+
    @abstractmethod
    def fwd(
        self,
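
Both abstract declarations in this commit (here and in `make_base_descriptor.py` below) have docstring-only bodies, which is all `abc` needs: a concrete subclass that forgets the override cannot be instantiated. A stand-in illustration, not the generated deepmd-kit base class:

from abc import ABC, abstractmethod


class BaseAtomicModelSketch(ABC):  # stand-in for the generated base class
    @abstractmethod
    def need_sorted_nlist_for_lower(self) -> bool:
        """Returns whether the atomic model needs sorted nlist when using `forward_lower`."""


class ForgetfulModel(BaseAtomicModelSketch):
    pass  # override missing on purpose


try:
    ForgetfulModel()
except TypeError as err:
    print(err)  # can't instantiate abstract class ForgetfulModel ...
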
4 changes: 4 additions & 0 deletions deepmd/dpmodel/atomic_model/pairtab_atomic_model.py
@@ -177,6 +177,10 @@ def deserialize(cls, data) -> "PairTabAtomicModel":
        tab_model.tab_data = tab_model.tab.tab_data.reshape(ntypes, ntypes, nspline, 4)
        return tab_model

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the atomic model needs sorted nlist when using `forward_lower`."""
+       return False
+
    def forward_atomic(
        self,
        extended_coord,
4 changes: 4 additions & 0 deletions deepmd/dpmodel/descriptor/hybrid.py
@@ -140,6 +140,10 @@ def compute_input_stats(self, merged: List[dict], path: Optional[DPPath] = None)
        for descrpt in self.descrpt_list:
            descrpt.compute_input_stats(merged, path)

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor needs sorted nlist when using `forward_lower`."""
+       return True
+
    def call(
        self,
        coord_ext,
4 changes: 4 additions & 0 deletions deepmd/dpmodel/descriptor/make_base_descriptor.py
@@ -103,6 +103,10 @@ def compute_input_stats(
        """Update mean and stddev for descriptor elements."""
        raise NotImplementedError

+   @abstractmethod
+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor needs sorted nlist when using `forward_lower`."""
+
    @abstractmethod
    def fwd(
        self,
4 changes: 4 additions & 0 deletions deepmd/dpmodel/descriptor/se_e2_a.py
@@ -283,6 +283,10 @@ def reinit_exclude(
        self.exclude_types = exclude_types
        self.emask = PairExcludeMask(self.ntypes, exclude_types=exclude_types)

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor needs sorted nlist when using `forward_lower`."""
+       return False
+
    def call(
        self,
        coord_ext,
4 changes: 4 additions & 0 deletions deepmd/dpmodel/descriptor/se_r.py
@@ -232,6 +232,10 @@ def cal_g(
            gg = self.embeddings[(ll,)].call(ss)
        return gg

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor needs sorted nlist when using `forward_lower`."""
+       return False
+
    def call(
        self,
        coord_ext,
33 changes: 27 additions & 6 deletions deepmd/dpmodel/model/make_model.py
@@ -214,7 +214,12 @@ def call_lower(
            """
            nframes, nall = extended_atype.shape[:2]
            extended_coord = extended_coord.reshape(nframes, -1, 3)
-           nlist = self.format_nlist(extended_coord, extended_atype, nlist)
+           nlist = self.format_nlist(
+               extended_coord,
+               extended_atype,
+               nlist,
+               extra_nlist_sort=self.need_sorted_nlist_for_lower(),
+           )
            cc_ext, _, fp, ap, input_prec = self.input_type_cast(
                extended_coord, fparam=fparam, aparam=aparam
            )
@@ -309,6 +314,7 @@ def format_nlist(
            extended_coord: np.ndarray,
            extended_atype: np.ndarray,
            nlist: np.ndarray,
+           extra_nlist_sort: bool = False,
        ):
            """Format the neighbor list.
@@ -334,6 +340,8 @@
                atomic type in extended region. nf x nall
            nlist
                neighbor list. nf x nloc x nsel
+           extra_nlist_sort
+               whether to forcibly sort the nlist.

            Returns
            -------
@@ -343,7 +351,12 @@
            """
            n_nf, n_nloc, n_nnei = nlist.shape
            mixed_types = self.mixed_types()
-           ret = self._format_nlist(extended_coord, nlist, sum(self.get_sel()))
+           ret = self._format_nlist(
+               extended_coord,
+               nlist,
+               sum(self.get_sel()),
+               extra_nlist_sort=extra_nlist_sort,
+           )
            if not mixed_types:
                ret = nlist_distinguish_types(ret, extended_atype, self.get_sel())
            return ret
@@ -353,6 +366,7 @@ def _format_nlist(
            extended_coord: np.ndarray,
            nlist: np.ndarray,
            nnei: int,
+           extra_nlist_sort: bool = False,
        ):
            n_nf, n_nloc, n_nnei = nlist.shape
            extended_coord = extended_coord.reshape([n_nf, -1, 3])
@@ -368,8 +382,9 @@
                    ],
                    axis=-1,
                )
-           elif n_nnei > nnei:
-               # make a copy before revise
+
+           if n_nnei > nnei or extra_nlist_sort:
+               n_nf, n_nloc, n_nnei = nlist.shape
                m_real_nei = nlist >= 0
                ret = np.where(m_real_nei, nlist, 0)
                coord0 = extended_coord[:, :n_nloc, :]
@@ -382,9 +397,11 @@
                ret = np.take_along_axis(ret, ret_mapping, axis=2)
                ret = np.where(rr > rcut, -1, ret)
                ret = ret[..., :nnei]
-           else:  # n_nnei == nnei:
-               # copy anyway...
+           # not extra_nlist_sort and n_nnei <= nnei:
+           elif n_nnei == nnei:
                ret = nlist
+           else:
+               pass
            assert ret.shape[-1] == nnei
            return ret

@@ -469,6 +486,10 @@ def mixed_types(self) -> bool:
            """
            return self.atomic_model.mixed_types()

+       def need_sorted_nlist_for_lower(self) -> bool:
+           """Returns whether the model needs sorted nlist when using `forward_lower`."""
+           return self.atomic_model.need_sorted_nlist_for_lower()
+
        def atomic_output_def(self) -> FittingOutputDef:
            """Get the output def of the atomic model."""
            return self.atomic_model.atomic_output_def()
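
For reference, the sort-and-trim path of `_format_nlist` re-sketched in self-contained NumPy under the same shape conventions (a hedged sketch, not the library code): pad a too-narrow list with -1, order each row by neighbor distance with masked entries pushed to the end, re-mask anything beyond the cutoff, and keep the nearest `nnei`.

import numpy as np

def format_nlist_sketch(coord, nlist, nnei, rcut):
    """Pad, distance-sort, and trim a neighbor list.

    coord : (nf, nall, 3) extended coordinates
    nlist : (nf, nloc, n_nnei) neighbor indices, -1 marks "no neighbor"
    """
    nf, nloc, n_nnei = nlist.shape
    if n_nnei < nnei:  # too narrow: pad with -1, like the first branch above
        pad = -np.ones((nf, nloc, nnei - n_nnei), dtype=nlist.dtype)
        nlist = np.concatenate([nlist, pad], axis=-1)
        n_nnei = nnei
    m_real = nlist >= 0
    idx = np.where(m_real, nlist, 0)
    # gather neighbor coordinates: (nf, nloc, n_nnei, 3)
    gidx = idx.reshape(nf, nloc * n_nnei, 1).repeat(3, axis=2)
    coord1 = np.take_along_axis(coord, gidx, axis=1).reshape(nf, nloc, n_nnei, 3)
    rr = np.linalg.norm(coord[:, :nloc, None, :] - coord1, axis=-1)
    rr = np.where(m_real, rr, np.inf)   # fake neighbors sort to the end
    order = np.argsort(rr, axis=-1)
    ret = np.take_along_axis(idx, order, axis=2)
    rr = np.take_along_axis(rr, order, axis=2)
    ret = np.where(rr > rcut, -1, ret)  # out-of-range (and padded) -> -1
    return ret[..., :nnei]              # keep the nearest nnei

rng = np.random.default_rng(0)
coord = rng.uniform(0.0, 4.0, size=(1, 16, 3))
nlist = np.tile(np.arange(1, 11), (1, 2, 1))  # nloc=2, n_nnei=10, unsorted
out = format_nlist_sketch(coord, nlist, nnei=6, rcut=3.5)
print(out.shape)  # (1, 2, 6), each row ordered by distance

The `extra_nlist_sort` flag simply makes this path run even when `n_nnei <= nnei`, which appears to be the large-`sel` LAMMPS case the commit title points at.
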
4 changes: 4 additions & 0 deletions deepmd/pt/model/atomic_model/dp_atomic_model.py
@@ -100,6 +100,10 @@ def mixed_types(self) -> bool:
        """
        return self.descriptor.mixed_types()

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the atomic model needs sorted nlist when using `forward_lower`."""
+       return self.descriptor.need_sorted_nlist_for_lower()
+
    def serialize(self) -> dict:
        dd = BaseAtomicModel.serialize(self)
        dd.update(
4 changes: 4 additions & 0 deletions deepmd/pt/model/atomic_model/linear_atomic_model.py
@@ -132,6 +132,10 @@ def _sort_rcuts_sels(self) -> Tuple[List[float], List[int]]:
        sorted_sels: List[int] = outer_sorted[:, 1].to(torch.int64).tolist()
        return sorted_rcuts, sorted_sels

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the atomic model needs sorted nlist when using `forward_lower`."""
+       return True
+
    def forward_atomic(
        self,
        extended_coord: torch.Tensor,
4 changes: 4 additions & 0 deletions deepmd/pt/model/atomic_model/pairtab_atomic_model.py
@@ -164,6 +164,10 @@ def mixed_types(self) -> bool:
        # to match DPA1 and DPA2.
        return True

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the atomic model needs sorted nlist when using `forward_lower`."""
+       return False
+
    def serialize(self) -> dict:
        dd = BaseAtomicModel.serialize(self)
        dd.update(
4 changes: 4 additions & 0 deletions deepmd/pt/model/descriptor/descriptor.py
@@ -159,6 +159,10 @@ def forward(
        """Calculate DescriptorBlock."""
        pass

+   @abstractmethod
+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor block needs sorted nlist when using `forward_lower`."""
+

def make_default_type_embedding(
    ntypes,
4 changes: 4 additions & 0 deletions deepmd/pt/model/descriptor/dpa1.py
@@ -150,6 +150,10 @@ def mixed_types(self) -> bool:
        """
        return self.se_atten.mixed_types()

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor needs sorted nlist when using `forward_lower`."""
+       return self.se_atten.need_sorted_nlist_for_lower()
+
    def share_params(self, base_class, shared_level, resume=False):
        """
        Share the parameters of self to the base_class with shared_level during multitask training.
4 changes: 4 additions & 0 deletions deepmd/pt/model/descriptor/dpa2.py
@@ -307,6 +307,10 @@ def mixed_types(self) -> bool:
        """
        return True

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor needs sorted nlist when using `forward_lower`."""
+       return True
+
    def share_params(self, base_class, shared_level, resume=False):
        """
        Share the parameters of self to the base_class with shared_level during multitask training.
8 changes: 8 additions & 0 deletions deepmd/pt/model/descriptor/hybrid.py
@@ -140,6 +140,10 @@ def mixed_types(self):
        """
        return any(descrpt.mixed_types() for descrpt in self.descrpt_list)

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor needs sorted nlist when using `forward_lower`."""
+       return True
+
    def share_params(self, base_class, shared_level, resume=False):
        """
        Share the parameters of self to the base_class with shared_level during multitask training.
@@ -369,6 +373,10 @@ def mixed_types(self) -> bool:
        """
        return all(descriptor.mixed_types() for descriptor in self.descriptor_list)

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor block needs sorted nlist when using `forward_lower`."""
+       return True
+
    @property
    def dim_out(self):
        """Returns the output dimension of this descriptor."""
4 changes: 4 additions & 0 deletions deepmd/pt/model/descriptor/repformers.py
@@ -205,6 +205,10 @@ def mixed_types(self) -> bool:
        """
        return True

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor block needs sorted nlist when using `forward_lower`."""
+       return False
+
    @property
    def dim_out(self):
        """Returns the output dimension of this descriptor."""
8 changes: 8 additions & 0 deletions deepmd/pt/model/descriptor/se_a.py
@@ -132,6 +132,10 @@ def mixed_types(self):
        """
        return self.sea.mixed_types()

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor needs sorted nlist when using `forward_lower`."""
+       return self.sea.need_sorted_nlist_for_lower()
+
    def share_params(self, base_class, shared_level, resume=False):
        """
        Share the parameters of self to the base_class with shared_level during multitask training.
@@ -433,6 +437,10 @@ def mixed_types(self) -> bool:
        """
        return False

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor block needs sorted nlist when using `forward_lower`."""
+       return False
+
    @property
    def dim_out(self):
        """Returns the output dimension of this descriptor."""
4 changes: 4 additions & 0 deletions deepmd/pt/model/descriptor/se_atten.py
@@ -196,6 +196,10 @@ def mixed_types(self) -> bool:
        """
        return True

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor block needs sorted nlist when using `forward_lower`."""
+       return False
+
    @property
    def dim_out(self):
        """Returns the output dimension of this descriptor."""
4 changes: 4 additions & 0 deletions deepmd/pt/model/descriptor/se_r.py
@@ -160,6 +160,10 @@ def mixed_types(self) -> bool:
        """
        return False

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the descriptor needs sorted nlist when using `forward_lower`."""
+       return False
+
    def share_params(self, base_class, shared_level, resume=False):
        """
        Share the parameters of self to the base_class with shared_level during multitask training.
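
Across the diff, the new hook settles as follows: the linear/ZBL atomic models, both hybrid descriptors, and DPA-2 return True (they assemble neighbors from sub-lists with different cutoffs, so a distance order appears to be assumed); the pair-table model, se_e2_a, and se_r in both backends, along with the se_a, se_atten, and repformers blocks, return False; everything else (dp_atomic_model, DPA-1, the se_a wrapper, the frozen model, the model wrapper) just delegates downward.
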
1 change: 1 addition & 0 deletions deepmd/pt/model/model/dipole_model.py
@@ -76,6 +76,7 @@ def forward_lower(
            fparam=fparam,
            aparam=aparam,
            do_atomic_virial=do_atomic_virial,
+           extra_nlist_sort=self.need_sorted_nlist_for_lower(),
        )
        if self.get_fitting_net() is not None:
            model_predict = {}
1 change: 1 addition & 0 deletions deepmd/pt/model/model/dos_model.py
@@ -74,6 +74,7 @@ def forward_lower(
            fparam=fparam,
            aparam=aparam,
            do_atomic_virial=do_atomic_virial,
+           extra_nlist_sort=self.need_sorted_nlist_for_lower(),
        )
        if self.get_fitting_net() is not None:
            model_predict = {}
1 change: 1 addition & 0 deletions deepmd/pt/model/model/dp_zbl_model.py
@@ -86,6 +86,7 @@ def forward_lower(
            fparam=fparam,
            aparam=aparam,
            do_atomic_virial=do_atomic_virial,
+           extra_nlist_sort=self.need_sorted_nlist_for_lower(),
        )

        model_predict = {}
1 change: 1 addition & 0 deletions deepmd/pt/model/model/ener_model.py
@@ -78,6 +78,7 @@ def forward_lower(
            fparam=fparam,
            aparam=aparam,
            do_atomic_virial=do_atomic_virial,
+           extra_nlist_sort=self.need_sorted_nlist_for_lower(),
        )
        if self.get_fitting_net() is not None:
            model_predict = {}
4 changes: 4 additions & 0 deletions deepmd/pt/model/model/frozen.py
@@ -102,6 +102,10 @@ def mixed_types(self) -> bool:
        """
        return self.model.mixed_types()

+   def need_sorted_nlist_for_lower(self) -> bool:
+       """Returns whether the model needs sorted nlist when using `forward_lower`."""
+       return self.model.need_sorted_nlist_for_lower()
+
    @torch.jit.export
    def forward(
        self,
(5 more changed files not shown)
