From ea77750759cc213d7c30ebb6d42c026bf23b829c Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Sat, 7 Sep 2024 02:13:13 -0400
Subject: [PATCH] Support a generic Comfy format for text encoder loras.

This is a format with keys like:

text_encoders.clip_l.transformer.text_model.encoder.layers.9.self_attn.v_proj.lora_up.weight

Instead of waiting for me to add support for specific lora formats you can
convert your text encoder loras to this format instead.

If you want to see an example save a text encoder lora with the SaveLora
node with the commit right after this one.
---
 comfy/lora.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/comfy/lora.py b/comfy/lora.py
index eb95d02ab..ad951bbaf 100644
--- a/comfy/lora.py
+++ b/comfy/lora.py
@@ -201,6 +201,9 @@ def load_lora(lora, to_load):
 
 def model_lora_keys_clip(model, key_map={}):
     sdk = model.state_dict().keys()
+    for k in sdk:
+        if k.endswith(".weight"):
+            key_map["text_encoders.{}".format(k[:-len(".weight")])] = k #generic lora format without any weird key names
 
     text_model_lora_key = "lora_te_text_model_encoder_layers_{}_{}"
     clip_l_present = False