
Commit

Update adapter-only checkpoint saving to use adapter params directly
SalmanMohammadi committed Sep 15, 2024
1 parent 05620fe commit 4991014
Showing 2 changed files with 3 additions and 7 deletions.
4 changes: 1 addition & 3 deletions recipes/lora_dpo_single_device.py
@@ -434,9 +434,7 @@ def save_checkpoint(self, epoch: int) -> None:
         else:
             # No need to merge state dict if we're only saving adapter weights
             adapter_state_dict = {
-                k: v
-                for k, v in self._model.state_dict().items()
-                if adapter_key_filter(k)
+                k: v.cpu() for k, v in get_adapter_params(self._model).items()
             }

         ckpt_dict.update({training.ADAPTER_KEY: adapter_state_dict})
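The change here is that the adapter-only branch no longer filters the full self._model.state_dict(); it reads the adapter parameters directly and moves them to CPU. A minimal sketch of the new path, assuming get_adapter_params is importable from torchtune.modules.peft (the import is not shown in this diff):

import torch
from torchtune.modules.peft import get_adapter_params

def adapter_only_state_dict(model: torch.nn.Module) -> dict:
    # Keep only the LoRA/adapter parameters; .cpu() moves them off the GPU
    # before the checkpoint is serialized.
    return {k: v.cpu() for k, v in get_adapter_params(model).items()}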
6 changes: 2 additions & 4 deletions recipes/lora_finetune_single_device.py
@@ -529,7 +529,6 @@ def save_checkpoint(self, epoch: int) -> None:
                 }
             )

-        adapter_key_filter = lambda x: x in self.adapter_params
         if not self._save_adapter_weights_only:
             # Construct the full state dict with LoRA weights merged into base LLM weights

@@ -539,6 +538,7 @@ def save_checkpoint(self, epoch: int) -> None:
             # Construct the adapter weights
             # Do this using the state_dict to avoid running upcast and H2D in state_dict post hook twice
             # Must be before get_merged_lora_ckpt because get_merged_lora_ckpt will remove lora keys
+            adapter_key_filter = lambda x: x in self.adapter_params
             adapter_state_dict = {
                 k: v for k, v in state_dict.items() if adapter_key_filter(k)
             }
@@ -553,9 +553,7 @@ def save_checkpoint(self, epoch: int) -> None:
         else:
             # No need to merge state dict if we're only saving adapter weights
             adapter_state_dict = {
-                k: v
-                for k, v in self._model.state_dict().items()
-                if adapter_key_filter(k)
+                k: v.cpu() for k, v in get_adapter_params(self._model).items()
             }

         ckpt_dict.update({training.ADAPTER_KEY: adapter_state_dict})
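In lora_finetune_single_device.py the adapter_key_filter lambda also moves inside the merge branch, since the adapter-only branch no longer needs it, and the adapter weights are still extracted before get_merged_lora_ckpt strips the LoRA keys. A rough sketch of the resulting branching; the import paths, training.MODEL_KEY, and the rank/alpha arguments to get_merged_lora_ckpt are assumptions drawn from the surrounding recipe rather than from this diff:

from torchtune import training
from torchtune.modules.peft import get_adapter_params, get_merged_lora_ckpt

def build_ckpt_dict(model, adapter_params, lora_rank, lora_alpha, save_adapter_weights_only):
    ckpt_dict = {}
    if not save_adapter_weights_only:
        # Full checkpoint: merge LoRA weights into the base model weights.
        state_dict = {k: v.cpu() for k, v in model.state_dict().items()}
        # Extract adapter weights *before* merging, because get_merged_lora_ckpt
        # removes the LoRA keys from the state dict.
        adapter_key_filter = lambda x: x in adapter_params
        adapter_state_dict = {k: v for k, v in state_dict.items() if adapter_key_filter(k)}
        merged_state_dict = get_merged_lora_ckpt(state_dict, rank=lora_rank, alpha=lora_alpha)
        ckpt_dict[training.MODEL_KEY] = merged_state_dict
    else:
        # Adapter-only checkpoint: no merge needed, read adapter params directly.
        adapter_state_dict = {k: v.cpu() for k, v in get_adapter_params(model).items()}
    ckpt_dict[training.ADAPTER_KEY] = adapter_state_dict
    return ckpt_dict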
