From 8328a2d8cdabd0e42b856dd0193ebc24ea41c359 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Fri, 26 Jul 2024 12:11:32 -0400
Subject: [PATCH] Let hunyuan dit work with all prompt lengths.

---
 comfy/ldm/hydit/poolers.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/comfy/ldm/hydit/poolers.py b/comfy/ldm/hydit/poolers.py
index 3470041b..2c6e46e6 100644
--- a/comfy/ldm/hydit/poolers.py
+++ b/comfy/ldm/hydit/poolers.py
@@ -16,6 +16,7 @@ class AttentionPool(nn.Module):
         self.embed_dim = embed_dim
 
     def forward(self, x):
+        x = x[:,:self.positional_embedding.shape[0] - 1]
         x = x.permute(1, 0, 2) # NLC -> LNC
         x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (L+1)NC
         x = x + self.positional_embedding[:, None, :].to(dtype=x.dtype, device=x.device) # (L+1)NC
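
Why the one-line fix works: the AttentionPool's learned positional embedding is
sized for a fixed maximum token count, plus one extra slot for the mean token
that forward() prepends. A prompt longer than that maximum produced a sequence
of L+1 tokens that could not broadcast against the (L_max+1, 1, C) positional
embedding, so the addition raised a shape mismatch. Slicing the input to
positional_embedding.shape[0] - 1 tokens up front guarantees the concatenated
sequence never exceeds the embedding table, while shorter-than-maximum inputs
pass through unchanged.

The following is a minimal, self-contained sketch of the patched pooler, not
the exact ComfyUI class: the constructor layout, the sizes, and the use of
nn.MultiheadAttention here are assumptions for illustration.

import torch
import torch.nn as nn

class AttentionPoolSketch(nn.Module):
    def __init__(self, seq_len, embed_dim, num_heads):
        super().__init__()
        # One extra row beyond seq_len for the mean token prepended in forward().
        self.positional_embedding = nn.Parameter(
            torch.randn(seq_len + 1, embed_dim) / embed_dim ** 0.5)
        self.attn = nn.MultiheadAttention(embed_dim, num_heads)
        self.embed_dim = embed_dim

    def forward(self, x):
        # The patched line: drop tokens beyond what the positional embedding
        # can cover, leaving one slot free for the mean token.
        x = x[:, :self.positional_embedding.shape[0] - 1]
        x = x.permute(1, 0, 2)                                   # NLC -> LNC
        x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0)   # (L+1)NC
        x = x + self.positional_embedding[:, None, :].to(dtype=x.dtype,
                                                         device=x.device)
        # Pool by attending from the mean token; output is (N, C).
        x, _ = self.attn(x[:1], x, x)
        return x.squeeze(0)

pool = AttentionPoolSketch(seq_len=256, embed_dim=64, num_heads=4)
exact = pool(torch.randn(2, 256, 64))   # exactly the trained length: unchanged
longer = pool(torch.randn(2, 300, 64))  # longer prompt: now truncated to 256
print(exact.shape, longer.shape)        # torch.Size([2, 64]) for both

Before the patch, the second call in this sketch would fail inside the
positional-embedding addition; truncating at the top of forward() is the
smallest change that makes every prompt length safe.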