diff --git a/deepmd/pt/train/training.py b/deepmd/pt/train/training.py index 3b8b5a435c..2aa672bd60 100644 --- a/deepmd/pt/train/training.py +++ b/deepmd/pt/train/training.py @@ -31,7 +31,6 @@ TensorLoss, ) from deepmd.pt.model.model import ( - DOSModel, get_model, get_zbl_model, ) @@ -601,15 +600,13 @@ def single_model_finetune( _finetune_rule_single, _sample_func, ): - # need fix for DOSModel - if not isinstance(_model, DOSModel): - _model = _model_change_out_bias( - _model, - _sample_func, - _bias_adjust_mode="change-by-statistic" - if not _finetune_rule_single.get_random_fitting() - else "set-by-statistic", - ) + _model = _model_change_out_bias( + _model, + _sample_func, + _bias_adjust_mode="change-by-statistic" + if not _finetune_rule_single.get_random_fitting() + else "set-by-statistic", + ) return _model if not self.multi_task: diff --git a/deepmd/pt/utils/stat.py b/deepmd/pt/utils/stat.py index b96310a2e6..b0bbda5dbe 100644 --- a/deepmd/pt/utils/stat.py +++ b/deepmd/pt/utils/stat.py @@ -399,6 +399,10 @@ def compute_output_stats_global( model_pred: Optional[Dict[str, np.ndarray]] = None, ): """This function only handle stat computation from reduced global labels.""" + # return directly if the model prediction is empty for global labels + if model_pred == {}: + return {}, {} + # get label dict from sample; for each key, only picking the system with global labels. 
outputs = { kk: [ diff --git a/source/tests/pt/test_finetune.py b/source/tests/pt/test_finetune.py index 2db3076da2..f92dde2cff 100644 --- a/source/tests/pt/test_finetune.py +++ b/source/tests/pt/test_finetune.py @@ -44,6 +44,7 @@ ) from .model.test_permutation import ( + model_dos, model_dpa1, model_dpa2, model_se_e2_a, @@ -72,6 +73,13 @@ must=False, high_prec=False, ), + DataRequirementItem( + "dos", + ndof=250, + atomic=False, + must=False, + high_prec=True, + ), DataRequirementItem( "atom_ener", ndof=1, @@ -92,6 +100,7 @@ class FinetuneTest: def test_finetune_change_out_bias(self): + self.testkey = "energy" if self.testkey is None else self.testkey # get data data = DpLoaderSet( self.data_file, @@ -108,7 +117,7 @@ def test_finetune_change_out_bias(self): model = get_model(self.config["model"]).to(env.DEVICE) atomic_model = model.atomic_model atomic_model["out_bias"] = torch.rand_like(atomic_model["out_bias"]) - energy_bias_before = to_numpy_array(atomic_model["out_bias"])[0].ravel() + energy_bias_before = to_numpy_array(atomic_model["out_bias"])[0] # prepare original model for test dp = torch.jit.script(model) @@ -123,7 +132,7 @@ def test_finetune_change_out_bias(self): sampled, bias_adjust_mode="change-by-statistic", ) - energy_bias_after = to_numpy_array(atomic_model["out_bias"])[0].ravel() + energy_bias_after = to_numpy_array(atomic_model["out_bias"])[0] # get ground-truth energy bias change sorter = np.argsort(full_type_map) @@ -140,10 +149,10 @@ def test_finetune_change_out_bias(self): to_numpy_array(sampled[0]["box"][:ntest]), to_numpy_array(sampled[0]["atype"][0]), )[0] - energy_diff = to_numpy_array(sampled[0]["energy"][:ntest]) - energy + energy_diff = to_numpy_array(sampled[0][self.testkey][:ntest]) - energy finetune_shift = ( energy_bias_after[idx_type_map] - energy_bias_before[idx_type_map] - ) + ).ravel() ground_truth_shift = np.linalg.lstsq(atom_nums, energy_diff, rcond=None)[ 0 ].reshape(-1) @@ -262,6 +271,7 @@ def setUp(self): 
self.config["training"]["numb_steps"] = 1 self.config["training"]["save_freq"] = 1 self.mixed_types = False + self.testkey = None class TestEnergyZBLModelSeA(FinetuneTest, unittest.TestCase): @@ -276,6 +286,22 @@ def setUp(self): self.config["training"]["numb_steps"] = 1 self.config["training"]["save_freq"] = 1 self.mixed_types = False + self.testkey = None + + +class TestEnergyDOSModelSeA(FinetuneTest, unittest.TestCase): + def setUp(self): + input_json = str(Path(__file__).parent / "dos/input.json") + with open(input_json) as f: + self.config = json.load(f) + self.data_file = [str(Path(__file__).parent / "dos/data/global_system")] + self.config["training"]["training_data"]["systems"] = self.data_file + self.config["training"]["validation_data"]["systems"] = self.data_file + self.config["model"] = deepcopy(model_dos) + self.config["training"]["numb_steps"] = 1 + self.config["training"]["save_freq"] = 1 + self.mixed_types = False + self.testkey = "dos" class TestEnergyModelDPA1(FinetuneTest, unittest.TestCase): @@ -290,6 +316,7 @@ def setUp(self): self.config["training"]["numb_steps"] = 1 self.config["training"]["save_freq"] = 1 self.mixed_types = True + self.testkey = None class TestEnergyModelDPA2(FinetuneTest, unittest.TestCase): @@ -306,6 +333,7 @@ def setUp(self): self.config["training"]["numb_steps"] = 1 self.config["training"]["save_freq"] = 1 self.mixed_types = True + self.testkey = None if __name__ == "__main__":