From 17030fd4c03331545698c8f1e299a17e1b93b8c6 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Wed, 7 Aug 2024 13:18:32 -0400
Subject: [PATCH] Support for "Comfy" lora format.

The keys are just:

model.full.model.key.name.lora_up.weight

It is supported by all models that ComfyUI supports.

Now people can just convert loras to this format instead of having to ask
for me to implement them.
---
 comfy/lora.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/comfy/lora.py b/comfy/lora.py
index 04e8861c9d9..eecde3927e9 100644
--- a/comfy/lora.py
+++ b/comfy/lora.py
@@ -245,6 +245,7 @@ def model_lora_keys_unet(model, key_map={}):
             key_lora = k[len("diffusion_model."):-len(".weight")].replace(".", "_")
             key_map["lora_unet_{}".format(key_lora)] = k
             key_map["lora_prior_unet_{}".format(key_lora)] = k #cascade lora: TODO put lora key prefix in the model config
+            key_map["model.{}".format(k[:-len(".weight")])] = k #generic lora format without any weird key names
 
     diffusers_keys = comfy.utils.unet_to_diffusers(model.model_config.unet_config)
     for k in diffusers_keys:
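
For reference, a minimal sketch (not part of the patch) of what the new mapping
implies for anyone converting a lora to this format: the lora key is just the
model state dict key, prefixed with "model." and with ".weight" replaced by
".lora_up.weight" (and the matching ".lora_down.weight" for the pair). The
helper name and the example key below are hypothetical, for illustration only.

def comfy_lora_key_names(model_key):
    # Mirrors the mapping added above:
    #   key_map["model.{}".format(k[:-len(".weight")])] = k
    # The commit message says the lora key is "<mapped key>.lora_up.weight";
    # the down weight is assumed to follow the usual lora_up/lora_down pairing.
    assert model_key.endswith(".weight")
    base = "model.{}".format(model_key[:-len(".weight")])
    return base + ".lora_up.weight", base + ".lora_down.weight"

# Hypothetical example key:
# comfy_lora_key_names("diffusion_model.input_blocks.1.1.proj_in.weight")
# -> ("model.diffusion_model.input_blocks.1.1.proj_in.lora_up.weight",
#     "model.diffusion_model.input_blocks.1.1.proj_in.lora_down.weight")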