Merge pull request #37 from invoke-ai/bump-deps
Bump diffusers and transformers versions.
RyanJDick authored Dec 7, 2023
2 parents c7eaaf2 + 2079821 commit 20a9fee
Showing 2 changed files with 4 additions and 4 deletions.
pyproject.toml (2 additions, 2 deletions)

@@ -17,7 +17,7 @@ classifiers = [
 dependencies = [
     "accelerate~=0.21.0",
     "datasets~=2.14.3",
-    "diffusers~=0.19.3",
+    "diffusers~=0.24.0",
     "numpy",
     "omegaconf",
     "Pillow",
@@ -29,7 +29,7 @@ dependencies = [
     "torch>=2.0.1",
     "torchvision~=0.15.2",
     "tqdm",
-    "transformers~=4.31.0",
+    "transformers~=4.35.0",
     # Known issue with xformers 0.0.16 on some GPUs:
     # https://github.com/huggingface/diffusers/issues/2234#issuecomment-1416931212
     "xformers>=0.0.17",
src/invoke_training/lora/lora_block.py (2 additions, 2 deletions)

@@ -18,5 +18,5 @@ def __init__(self, original_module: torch.nn.Module, lora_layer: torch.nn.Module
         self.lora_layer = lora_layer
         self.lora_multiplier = lora_multiplier
 
-    def forward(self, input):
-        return self.original_module(input) + self.lora_multiplier * self.lora_layer(input)
+    def forward(self, input, *args, **kwargs):
+        return self.original_module(input, *args, **kwargs) + self.lora_multiplier * self.lora_layer(input)
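For context on the forward() change: newer diffusers releases can call wrapped sub-modules with extra positional and keyword arguments, so a wrapper whose forward() accepts only a single input raises a TypeError under the bumped pin; forwarding *args and **kwargs to the original module avoids this while the LoRA residual still sees only the input tensor. A minimal self-contained sketch of the pattern (the LoRABlock body is reconstructed from the diff above; the __init__ default and the LinearWithScale usage are hypothetical, for illustration only):

    import torch

    class LoRABlock(torch.nn.Module):
        def __init__(self, original_module: torch.nn.Module, lora_layer: torch.nn.Module, lora_multiplier: float = 1.0):
            super().__init__()
            self.original_module = original_module
            self.lora_layer = lora_layer
            self.lora_multiplier = lora_multiplier

        def forward(self, input, *args, **kwargs):
            # Extra arguments are forwarded to the wrapped module only; the
            # LoRA residual still sees just the input tensor.
            return self.original_module(input, *args, **kwargs) + self.lora_multiplier * self.lora_layer(input)

    # Hypothetical wrapped module that takes an extra keyword argument, as
    # newer diffusers layers may do.
    class LinearWithScale(torch.nn.Linear):
        def forward(self, input, scale: float = 1.0):
            return super().forward(input) * scale

    block = LoRABlock(LinearWithScale(8, 8), torch.nn.Linear(8, 8, bias=False), lora_multiplier=0.5)
    out = block(torch.randn(2, 8), scale=0.7)  # would raise TypeError before this change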