Merge pull request #14170 from MrCheeze/sd-turbo
Add support for SD 2.1 Turbo
AUTOMATIC1111 committed Dec 2, 2023
2 parents e294e46 + 6080045 commit 4125552
Showing 2 changed files with 19 additions and 7 deletions.
modules/sd_hijack.py (9 changes: 6 additions & 3 deletions)
@@ -38,9 +38,6 @@
 optimizers = []
 current_optimizer: sd_hijack_optimizations.SdOptimization = None
 
-ldm_original_forward = patches.patch(__file__, ldm.modules.diffusionmodules.openaimodel.UNetModel, "forward", sd_unet.UNetModel_forward)
-sgm_original_forward = patches.patch(__file__, sgm.modules.diffusionmodules.openaimodel.UNetModel, "forward", sd_unet.UNetModel_forward)
-
 def list_optimizers():
     new_optimizers = script_callbacks.list_optimizers_callback()
 
@@ -258,6 +255,9 @@ def flatten(el):
 
         import modules.models.diffusion.ddpm_edit
 
+        ldm_original_forward = patches.patch(__file__, ldm.modules.diffusionmodules.openaimodel.UNetModel, "forward", sd_unet.UNetModel_forward)
+        sgm_original_forward = patches.patch(__file__, sgm.modules.diffusionmodules.openaimodel.UNetModel, "forward", sd_unet.UNetModel_forward)
+
         if isinstance(m, ldm.models.diffusion.ddpm.LatentDiffusion):
             sd_unet.original_forward = ldm_original_forward
         elif isinstance(m, modules.models.diffusion.ddpm_edit.LatentDiffusion):
@@ -303,6 +303,9 @@ def undo_hijack(self, m):
         self.layers = None
         self.clip = None
 
+        patches.undo(__file__, ldm.modules.diffusionmodules.openaimodel.UNetModel, "forward")
+        patches.undo(__file__, sgm.modules.diffusionmodules.openaimodel.UNetModel, "forward")
+
         sd_unet.original_forward = None
 
 
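The net effect in sd_hijack.py: the UNetModel.forward patches are no longer applied at import time but inside hijack(), and undo_hijack() now reverts them via patches.undo(), so the patch is presumably only live while a model is actually hijacked. Below is a minimal, self-contained sketch of that patch/undo pattern; the patch and undo helpers are simplified stand-ins for illustration, not webui's actual modules.patches API.

# Simplified stand-ins for the patch/undo helpers (illustrative only).
_originals = {}

def patch(key, obj, field, replacement):
    # Remember the original attribute under (key, obj, field), then swap it.
    _originals[(key, obj, field)] = getattr(obj, field)
    setattr(obj, field, replacement)
    return _originals[(key, obj, field)]

def undo(key, obj, field):
    # Restore whatever patch() saved for this (key, obj, field).
    setattr(obj, field, _originals.pop((key, obj, field)))

class UNetModel:
    def forward(self, x):
        return "original"

def replacement_forward(self, x):
    return "patched"

original_forward = patch("sd_hijack_sketch", UNetModel, "forward", replacement_forward)
assert UNetModel().forward(None) == "patched"
undo("sd_hijack_sketch", UNetModel, "forward")
assert UNetModel().forward(None) == "original"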
modules/sd_models.py (17 changes: 13 additions & 4 deletions)
@@ -230,15 +230,19 @@ def select_checkpoint():
     return checkpoint_info
 
 
-checkpoint_dict_replacements = {
+checkpoint_dict_replacements_sd1 = {
     'cond_stage_model.transformer.embeddings.': 'cond_stage_model.transformer.text_model.embeddings.',
     'cond_stage_model.transformer.encoder.': 'cond_stage_model.transformer.text_model.encoder.',
     'cond_stage_model.transformer.final_layer_norm.': 'cond_stage_model.transformer.text_model.final_layer_norm.',
 }
 
+checkpoint_dict_replacements_sd2_turbo = { # Converts SD 2.1 Turbo from SGM to LDM format.
+    'conditioner.embedders.0.': 'cond_stage_model.',
+}
+
 
-def transform_checkpoint_dict_key(k):
-    for text, replacement in checkpoint_dict_replacements.items():
+def transform_checkpoint_dict_key(k, replacements):
+    for text, replacement in replacements.items():
         if k.startswith(text):
             k = replacement + k[len(text):]
 
@@ -249,9 +253,14 @@ def get_state_dict_from_checkpoint(pl_sd):
     pl_sd = pl_sd.pop("state_dict", pl_sd)
     pl_sd.pop("state_dict", None)
 
+    is_sd2_turbo = 'conditioner.embedders.0.model.ln_final.weight' in pl_sd and pl_sd['conditioner.embedders.0.model.ln_final.weight'].size()[0] == 1024
+
     sd = {}
     for k, v in pl_sd.items():
-        new_key = transform_checkpoint_dict_key(k)
+        if is_sd2_turbo:
+            new_key = transform_checkpoint_dict_key(k, checkpoint_dict_replacements_sd2_turbo)
+        else:
+            new_key = transform_checkpoint_dict_key(k, checkpoint_dict_replacements_sd1)
 
         if new_key is not None:
             sd[new_key] = v
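The new code path in sd_models.py detects an SGM-format SD 2.1 Turbo checkpoint by the presence of conditioner.embedders.0.model.ln_final.weight with a first dimension of 1024 (the OpenCLIP text-encoder width used by SD 2.x), then rewrites the SGM conditioner key prefix into the LDM-style cond_stage_model. prefix. A small sketch of the detection and rewrite on a fake two-key state dict; the sample keys and zero tensors are made up for illustration.

import torch

checkpoint_dict_replacements_sd2_turbo = {  # SGM -> LDM key prefixes
    'conditioner.embedders.0.': 'cond_stage_model.',
}

def transform_checkpoint_dict_key(k, replacements):
    # Rewrite a matching prefix; keys that match nothing pass through unchanged.
    for text, replacement in replacements.items():
        if k.startswith(text):
            k = replacement + k[len(text):]
    return k

# A fake SGM-format checkpoint; only the ln_final.weight shape matters
# to the detection heuristic.
pl_sd = {
    'conditioner.embedders.0.model.ln_final.weight': torch.zeros(1024),
    'model.diffusion_model.input_blocks.0.0.weight': torch.zeros(4),
}

is_sd2_turbo = ('conditioner.embedders.0.model.ln_final.weight' in pl_sd
                and pl_sd['conditioner.embedders.0.model.ln_final.weight'].size()[0] == 1024)
print(is_sd2_turbo)  # True

sd = {transform_checkpoint_dict_key(k, checkpoint_dict_replacements_sd2_turbo): v
      for k, v in pl_sd.items()}
print(sorted(sd))
# ['cond_stage_model.model.ln_final.weight',
#  'model.diffusion_model.input_blocks.0.0.weight']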
