Merge pull request #15816 from huchenlei/bias_backup
[Performance 5/6] Prevent unnecessary extra networks bias backup
AUTOMATIC1111 authored Jun 8, 2024
2 parents 371cb60 + b2ae449 commit 816bc42
Showing 1 changed file with 6 additions and 1 deletion.

extensions-builtin/Lora/networks.py (6 additions, 1 deletion)
@@ -388,13 +388,18 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
         self.network_weights_backup = weights_backup
 
     bias_backup = getattr(self, "network_bias_backup", None)
-    if bias_backup is None:
+    if bias_backup is None and wanted_names != ():
         if isinstance(self, torch.nn.MultiheadAttention) and self.out_proj.bias is not None:
             bias_backup = self.out_proj.bias.to(devices.cpu, copy=True)
         elif getattr(self, 'bias', None) is not None:
             bias_backup = self.bias.to(devices.cpu, copy=True)
         else:
             bias_backup = None
+
+        # Unlike weight which always has value, some modules don't have bias.
+        # Only report if bias is not None and current bias are not unchanged.
+        if bias_backup is not None and current_names != ():
+            raise RuntimeError("no backup bias found and current bias are not unchanged")
         self.network_bias_backup = bias_backup
 
     if current_names != wanted_names:
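What the change buys: the bias snapshot block now runs only when at least one network is wanted (wanted_names != ()), so layers no longer pay for a CPU copy of their bias when no extra networks are loaded. The new RuntimeError flags the one unrecoverable case: a network is already applied (current_names != ()) but no backup of the original bias was ever taken. Below is a minimal, self-contained sketch of the same gating pattern; maybe_backup_bias and the nn.Linear harness are illustrative stand-ins, not the repository's code, which additionally special-cases torch.nn.MultiheadAttention (whose bias lives at self.out_proj.bias) and copies through the webui's devices.cpu.

import torch

def maybe_backup_bias(module: torch.nn.Module, current_names: tuple, wanted_names: tuple) -> None:
    """Snapshot module.bias to CPU only when some network is about to modify it."""
    bias_backup = getattr(module, "network_bias_backup", None)
    if bias_backup is None and wanted_names != ():
        if getattr(module, "bias", None) is not None:
            # copy=True forces an independent CPU tensor rather than a view.
            bias_backup = module.bias.to("cpu", copy=True)
        # If a network is already applied, the bias we just read has already
        # been modified, so storing it as a "backup" would corrupt restores.
        if bias_backup is not None and current_names != ():
            raise RuntimeError("no backup bias found and current bias are not unchanged")
        module.network_bias_backup = bias_backup

layer = torch.nn.Linear(4, 4)

# No networks wanted: the gate short-circuits and nothing is copied.
maybe_backup_bias(layer, current_names=(), wanted_names=())
assert getattr(layer, "network_bias_backup", None) is None

# One network wanted, none applied yet: the original bias is snapshotted.
maybe_backup_bias(layer, current_names=(), wanted_names=(("my_lora", 1.0),))
assert torch.equal(layer.network_bias_backup, layer.bias.detach().cpu())

Before this commit, the first call above would already have copied the bias to CPU, even with zero networks loaded.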
