Update attention.py

comfyanonymous 2025-03-14 03:21:16 -04:00 committed by GitHub
parent f90ec0e6e2
commit 62a5b4ee88


@@ -544,7 +544,7 @@ def attention_flash(q, k, v, heads, mask=None, attn_precision=None, skip_reshape
             causal=False,
         ).transpose(1, 2)
     except Exception as e:
-        logging.warning("Flash Attention failed, using default SDPA: {e}")
+        logging.warning(f"Flash Attention failed, using default SDPA: {e}")
         out = torch.nn.functional.scaled_dot_product_attention(q, k, v, attn_mask=mask, dropout_p=0.0, is_causal=False)
     if not skip_output_reshape:
         out = (
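For context, the change adds the f-string prefix that the original warning call was missing: without it, Python logs the literal text "{e}" instead of the exception message. A minimal standalone sketch of the difference (not taken from attention.py, the RuntimeError is just a stand-in for a real Flash Attention failure):

import logging

logging.basicConfig(level=logging.WARNING)

try:
    raise RuntimeError("flash_attn is not installed")  # stand-in for a real failure
except Exception as e:
    # Missing f prefix: the placeholder is logged verbatim as "{e}"
    logging.warning("Flash Attention failed, using default SDPA: {e}")
    # With the f prefix: the exception message is interpolated
    logging.warning(f"Flash Attention failed, using default SDPA: {e}")

An equivalent alternative would be logging's lazy %-style formatting, logging.warning("Flash Attention failed, using default SDPA: %s", e), which defers string construction until the record is actually emitted.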