mirror of
https://github.com/comfyanonymous/ComfyUI.git
synced 2025-01-11 02:15:17 +00:00
Fix mask issue with attention_xformers.
This commit is contained in:
parent
22535d0589
commit
07f6eeaa13
@@ -372,10 +372,10 @@ def attention_xformers(q, k, v, heads, mask=None, attn_precision=None, skip_reshape=False):
     if mask is not None:
-        pad = 8 - q.shape[1] % 8
-        mask_out = torch.empty([q.shape[0], q.shape[1], q.shape[1] + pad], dtype=q.dtype, device=q.device)
-        mask_out[:, :, :mask.shape[-1]] = mask
-        mask = mask_out[:, :, :mask.shape[-1]]
+        pad = 8 - mask.shape[-1] % 8
+        mask_out = torch.empty([q.shape[0], q.shape[2], q.shape[1], mask.shape[-1] + pad], dtype=q.dtype, device=q.device)
+        mask_out[..., :mask.shape[-1]] = mask
+        mask = mask_out[..., :mask.shape[-1]]

     out = xformers.ops.memory_efficient_attention(q, k, v, attn_bias=mask)
|
Loading…
Reference in New Issue
Block a user