
Commit 28d32ba

Merge branch 'devel' into add_universal_ut

iProzd authored Jun 27, 2024
2 parents 87187cf + 58b8944
Showing 8 changed files with 55 additions and 16 deletions.
3 changes: 1 addition & 2 deletions deepmd/dpmodel/fitting/dipole_fitting.py
@@ -108,10 +108,8 @@ def __init__(
c_differentiable: bool = True,
type_map: Optional[List[str]] = None,
old_impl=False,
# not used
seed: Optional[Union[int, List[int]]] = None,
):
# seed, uniform_seed are not included
if tot_ener_zero:
raise NotImplementedError("tot_ener_zero is not implemented")
if spin is not None:
@@ -143,6 +141,7 @@ def __init__(
mixed_types=mixed_types,
exclude_types=exclude_types,
type_map=type_map,
seed=seed,
)
self.old_impl = False

2 changes: 1 addition & 1 deletion deepmd/dpmodel/fitting/dos_fitting.py
@@ -45,7 +45,6 @@ def __init__(
mixed_types: bool = False,
exclude_types: List[int] = [],
type_map: Optional[List[str]] = None,
# not used
seed: Optional[Union[int, List[int]]] = None,
):
if bias_dos is not None:
@@ -69,6 +68,7 @@ def __init__(
mixed_types=mixed_types,
exclude_types=exclude_types,
type_map=type_map,
seed=seed,
)

@classmethod
2 changes: 1 addition & 1 deletion deepmd/dpmodel/fitting/ener_fitting.py
@@ -46,7 +46,6 @@ def __init__(
mixed_types: bool = False,
exclude_types: List[int] = [],
type_map: Optional[List[str]] = None,
# not used
seed: Optional[Union[int, List[int]]] = None,
):
super().__init__(
@@ -70,6 +69,7 @@ def __init__(
mixed_types=mixed_types,
exclude_types=exclude_types,
type_map=type_map,
seed=seed,
)

@classmethod
4 changes: 3 additions & 1 deletion deepmd/dpmodel/fitting/invar_fitting.py
@@ -5,6 +5,7 @@
Dict,
List,
Optional,
Union,
)

import numpy as np
@@ -134,8 +135,8 @@ def __init__(
mixed_types: bool = True,
exclude_types: List[int] = [],
type_map: Optional[List[str]] = None,
seed: Optional[Union[int, List[int]]] = None,
):
# seed, uniform_seed are not included
if tot_ener_zero:
raise NotImplementedError("tot_ener_zero is not implemented")
if spin is not None:
@@ -172,6 +173,7 @@ def __init__(
if atom_ener is None or len([x for x in atom_ener if x is not None]) == 0
else [x is not None for x in atom_ener],
type_map=type_map,
seed=seed,
)

def serialize(self) -> dict:
3 changes: 1 addition & 2 deletions deepmd/dpmodel/fitting/polarizability_fitting.py
@@ -114,10 +114,8 @@ def __init__(
scale: Optional[List[float]] = None,
shift_diag: bool = True,
type_map: Optional[List[str]] = None,
# not used
seed: Optional[Union[int, List[int]]] = None,
):
# seed, uniform_seed are not included
if tot_ener_zero:
raise NotImplementedError("tot_ener_zero is not implemented")
if spin is not None:
@@ -167,6 +165,7 @@ def __init__(
mixed_types=mixed_types,
exclude_types=exclude_types,
type_map=type_map,
seed=seed,
)
self.old_impl = False

6 changes: 6 additions & 0 deletions source/api_cc/include/DeepPotPT.h
@@ -329,6 +329,12 @@ class DeepPotPT : public DeepPotBase {
bool gpu_enabled;
at::Tensor firstneigh_tensor;
torch::Dict<std::string, torch::Tensor> comm_dict;
/**
* @brief Translate PyTorch exceptions to the DeePMD-kit exception.
* @param[in] f The function to run.
* @example translate_error([&](){...});
*/
void translate_error(std::function<void()> f);
};

} // namespace deepmd
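The helper declared above is defined in DeepPotPT.cc below. For readers unfamiliar with the pattern, here is a self-contained sketch of the same wrap-and-rethrow idiom using only the standard library; demo_exception and translate_demo are hypothetical names used for illustration, not DeePMD-kit API:

#include <cstdio>
#include <functional>
#include <stdexcept>
#include <string>

// Project-level exception type standing in for deepmd::deepmd_exception.
struct demo_exception : public std::runtime_error {
  using std::runtime_error::runtime_error;
};

// Run f() and rethrow anything derived from std::runtime_error as the
// project-level exception, mirroring what translate_error does for
// c10::Error and torch::jit::JITException.
void translate_demo(std::function<void()> f) {
  try {
    f();
  } catch (const std::runtime_error& e) {
    throw demo_exception(std::string("backend error: ") + e.what());
  }
}

int main() {
  try {
    translate_demo([] { throw std::runtime_error("tensor shape mismatch"); });
  } catch (const demo_exception& e) {
    std::printf("%s\n", e.what());  // prints "backend error: tensor shape mismatch"
  }
  return 0;
}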
47 changes: 38 additions & 9 deletions source/api_cc/src/DeepPotPT.cc
@@ -2,13 +2,34 @@
#ifdef BUILD_PYTORCH
#include "DeepPotPT.h"

#include <torch/csrc/jit/runtime/jit_exception.h>

#include <cstdint>

#include "common.h"
#include "device.h"
#include "errors.h"

using namespace deepmd;

void DeepPotPT::translate_error(std::function<void()> f) {
try {
f();
// it seems that libtorch may throw different types of exceptions which are
// inherited from different base classes
// https://github.com/pytorch/pytorch/blob/13316a8d4642454012d34da0d742f1ba93fc0667/torch/csrc/jit/runtime/interpreter.cpp#L924-L939
} catch (const c10::Error& e) {
throw deepmd::deepmd_exception("DeePMD-kit PyTorch backend error: " +
std::string(e.what()));
} catch (const torch::jit::JITException& e) {
throw deepmd::deepmd_exception("DeePMD-kit PyTorch backend JIT error: " +
std::string(e.what()));
} catch (const std::runtime_error& e) {
throw deepmd::deepmd_exception("DeePMD-kit PyTorch backend error: " +
std::string(e.what()));
}
}

torch::Tensor createNlistTensor(const std::vector<std::vector<int>>& data) {
std::vector<torch::Tensor> row_tensors;

@@ -26,7 +47,7 @@ DeepPotPT::DeepPotPT(const std::string& model,
const std::string& file_content)
: inited(false) {
try {
init(model, gpu_rank, file_content);
translate_error([&] { init(model, gpu_rank, file_content); });
} catch (...) {
// Clean up and rethrow, as the destructor will not be called
throw;
@@ -444,8 +465,10 @@ void DeepPotPT::computew(std::vector<double>& ener,
const std::vector<double>& box,
const std::vector<double>& fparam,
const std::vector<double>& aparam) {
compute(ener, force, virial, atom_energy, atom_virial, coord, atype, box,
fparam, aparam);
translate_error([&] {
compute(ener, force, virial, atom_energy, atom_virial, coord, atype, box,
fparam, aparam);
});
}
void DeepPotPT::computew(std::vector<double>& ener,
std::vector<float>& force,
@@ -457,8 +480,10 @@ void DeepPotPT::computew(std::vector<double>& ener,
const std::vector<float>& box,
const std::vector<float>& fparam,
const std::vector<float>& aparam) {
compute(ener, force, virial, atom_energy, atom_virial, coord, atype, box,
fparam, aparam);
translate_error([&] {
compute(ener, force, virial, atom_energy, atom_virial, coord, atype, box,
fparam, aparam);
});
}
void DeepPotPT::computew(std::vector<double>& ener,
std::vector<double>& force,
@@ -473,8 +498,10 @@ void DeepPotPT::computew(std::vector<double>& ener,
const int& ago,
const std::vector<double>& fparam,
const std::vector<double>& aparam) {
compute(ener, force, virial, atom_energy, atom_virial, coord, atype, box,
nghost, inlist, ago, fparam, aparam);
translate_error([&] {
compute(ener, force, virial, atom_energy, atom_virial, coord, atype, box,
nghost, inlist, ago, fparam, aparam);
});
}
void DeepPotPT::computew(std::vector<double>& ener,
std::vector<float>& force,
@@ -489,8 +516,10 @@ void DeepPotPT::computew(std::vector<double>& ener,
const int& ago,
const std::vector<float>& fparam,
const std::vector<float>& aparam) {
compute(ener, force, virial, atom_energy, atom_virial, coord, atype, box,
nghost, inlist, ago, fparam, aparam);
translate_error([&] {
compute(ener, force, virial, atom_energy, atom_virial, coord, atype, box,
nghost, inlist, ago, fparam, aparam);
});
}
void DeepPotPT::computew_mixed_type(std::vector<double>& ener,
std::vector<double>& force,
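With the computew overloads wrapped as above, errors raised inside the PyTorch backend should reach C++ API users as deepmd::deepmd_exception. A minimal caller-side sketch (illustrative only; the DeepPot.h include and the model path are assumptions, not part of this commit):

#include <iostream>

#include "DeepPot.h"  // assumed api_cc header providing deepmd::DeepPot
#include "errors.h"   // deepmd::deepmd_exception

int main() {
  try {
    // Loading a broken or missing .pth model now fails with the
    // DeePMD-kit exception type instead of a raw libtorch error.
    deepmd::DeepPot dp("missing_model.pth");
  } catch (const deepmd::deepmd_exception& e) {
    std::cerr << "DeePMD-kit error: " << e.what() << std::endl;
    return 1;
  }
  return 0;
}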
4 changes: 4 additions & 0 deletions source/api_cc/tests/test_deepmd_exception.cc
@@ -27,6 +27,10 @@ TEST(TestDeepmdException, deepmdexception_nofile_deeppot) {
ASSERT_THROW(deepmd::DeepPot("_no_such_file.pb"), deepmd::deepmd_exception);
}

TEST(TestDeepmdException, deepmdexception_nofile_deeppot_pt) {
ASSERT_THROW(deepmd::DeepPot("_no_such_file.pth"), deepmd::deepmd_exception);
}

TEST(TestDeepmdException, deepmdexception_nofile_deeppotmodeldevi) {
ASSERT_THROW(
deepmd::DeepPotModelDevi({"_no_such_file.pb", "_no_such_file.pb"}),
