Support format for embeddings bundled in loras.

comfyanonymous 2024-08-07 03:45:25 -04:00
parent 2a02546e20
commit 1c08bf35b4


@@ -313,6 +313,20 @@ def expand_directory_list(directories):
            dirs.add(root)
    return list(dirs)

def bundled_embed(embed, key): #bundled embedding in lora format
    i = 0
    out_list = []
    while True:
        i += 1
        k = key.format(i)
        w = embed.get(k, None)
        if w is None:
            break
        else:
            out_list.append(w)
    return torch.cat(out_list, dim=0)

def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=None):
    if isinstance(embedding_directory, str):
        embedding_directory = [embedding_directory]
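For illustration only (not part of the commit): a minimal sketch of how the new bundled_embed helper walks the numbered bundle keys. The toy_embed dict and its tensor shapes are invented here; only the 'bundle_emb.placeN...' key pattern comes from the diff.

import torch

# Toy stand-in for a LoRA state dict that bundles an embedding; shapes are made up.
toy_embed = {
    "bundle_emb.place1.string_to_param.*": torch.randn(2, 768),
    "bundle_emb.place2.string_to_param.*": torch.randn(1, 768),
}

# bundled_embed (added above) probes place1, place2, ... until a key is missing,
# then concatenates the collected tensors along dim 0.
vec = bundled_embed(toy_embed, "bundle_emb.place{}.string_to_param.*")
print(vec.shape)  # torch.Size([3, 768])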
@@ -378,6 +392,10 @@ def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=None):
            embed_out = torch.cat(out_list, dim=0)
        elif embed_key is not None and embed_key in embed:
            embed_out = embed[embed_key]
        elif 'bundle_emb.place1.string_to_param.*' in embed:
            embed_out = bundled_embed(embed, 'bundle_emb.place{}.string_to_param.*')
        elif 'bundle_emb.place1.{}'.format(embed_key) in embed:
            embed_out = bundled_embed(embed, 'bundle_emb.place{}.{}'.format('{}', embed_key))
        else:
            values = embed.values()
            embed_out = next(iter(values))
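For context, a hedged sketch (not in the commit) of the two bundled-key layouts the new elif branches in load_embed check for. The dicts below are invented, and embed_key is assumed here to be 'clip_l' purely as an example value.

import torch

embedding_size = 768   # illustrative
embed_key = "clip_l"   # assumed example value

# Layout matched by the first new branch: pytorch-style 'string_to_param.*' keys.
bundle_a = {"bundle_emb.place1.string_to_param.*": torch.randn(2, embedding_size)}

# Layout matched by the second new branch: tensors keyed by the requested embed_key.
bundle_b = {"bundle_emb.place1.clip_l": torch.randn(2, embedding_size)}

# Note the format trick in the diff: the first '{}' stays a literal placeholder for
# bundled_embed to fill with the place index, the second is replaced by embed_key.
key_a = "bundle_emb.place{}.string_to_param.*"
key_b = "bundle_emb.place{}.{}".format("{}", embed_key)  # -> 'bundle_emb.place{}.clip_l'

out_a = bundled_embed(bundle_a, key_a)  # shape (2, 768)
out_b = bundled_embed(bundle_b, key_b)  # shape (2, 768)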