Fix "Comfy" LoRA keys.

They are now in this format:
`diffusion_model.full.model.key.name.lora_up.weight`
This commit is contained in:
comfyanonymous 2024-08-07 13:49:31 -04:00
parent e1c528196e
commit 1208863eca

View File

@ -245,7 +245,7 @@ def model_lora_keys_unet(model, key_map={}):
key_lora = k[len("diffusion_model."):-len(".weight")].replace(".", "_")
key_map["lora_unet_{}".format(key_lora)] = k
key_map["lora_prior_unet_{}".format(key_lora)] = k #cascade lora: TODO put lora key prefix in the model config
key_map["model.{}".format(k[:-len(".weight")])] = k #generic lora format without any weird key names
key_map["{}".format(k[:-len(".weight")])] = k #generic lora format without any weird key names
diffusers_keys = comfy.utils.unet_to_diffusers(model.model_config.unet_config)
for k in diffusers_keys: