Fix issue with wan and other attention implementations.

Author: comfyanonymous
Date:   2025-02-25 19:13:39 -05:00
Parent: f37551c1d2
Commit: ea0f939df3


@@ -88,9 +88,9 @@ class WanSelfAttention(nn.Module):
         q, k = apply_rope(q, k, freqs)

         x = optimized_attention(
-            q=q.view(b, s, n * d),
-            k=k.view(b, s, n * d),
-            v=v,
+            q.view(b, s, n * d),
+            k.view(b, s, n * d),
+            v,
             heads=self.num_heads,
         )
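
For context on why this fixes "wan and other attention implementations": `optimized_attention` is an alias that can resolve to any of several backend attention functions, and those backends are not guaranteed to name their first three parameters `q`, `k`, and `v`. Passing them positionally works for every backend; passing them as keywords only works for backends that happen to use those exact names. The sketch below is a minimal illustration of this failure mode, not ComfyUI's actual dispatch code; the backend names (`attention_pytorch`, `attention_other`) are hypothetical.

```python
# Minimal sketch (hypothetical backends) of why keyword arguments break
# when optimized_attention is an alias for one of several implementations.
import torch

def attention_pytorch(q, k, v, heads, mask=None):
    # Backend whose parameters happen to be named q/k/v.
    # Reshape (b, s, heads*dim) -> (b, heads, s, dim) for SDPA.
    b = q.shape[0]
    dim = q.shape[-1] // heads
    q, k, v = (t.view(b, -1, heads, dim).transpose(1, 2) for t in (q, k, v))
    out = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=mask)
    return out.transpose(1, 2).reshape(b, -1, heads * dim)

def attention_other(query, key, value, heads, mask=None):
    # Hypothetical alternative backend whose parameters are named
    # query/key/value instead of q/k/v.
    return attention_pytorch(query, key, value, heads, mask)

# Which backend gets bound depends on runtime configuration.
optimized_attention = attention_other

b, s, heads, dim = 1, 16, 8, 64
q = k = v = torch.randn(b, s, heads * dim)

# Positional call: works regardless of the backend's parameter names.
x = optimized_attention(q, k, v, heads=heads)

# A keyword call like optimized_attention(q=q, k=k, v=v, heads=heads)
# would raise TypeError here, because this backend's parameters are
# named query/key/value. That is the class of bug this commit removes.
```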