Add a try/except block so that an unexpected torch version string won't crash startup.

This commit is contained in:
comfyanonymous 2024-12-23 03:22:48 -05:00
parent c6b9c11ef6
commit 15564688ed

View File

@@ -224,8 +224,11 @@ if ENABLE_PYTORCH_ATTENTION:
     torch.backends.cuda.enable_flash_sdp(True)
     torch.backends.cuda.enable_mem_efficient_sdp(True)
-    if int(torch_version[0]) == 2 and int(torch_version[2]) >= 5:
-        torch.backends.cuda.allow_fp16_bf16_reduction_math_sdp(True)
+    try:
+        if int(torch_version[0]) == 2 and int(torch_version[2]) >= 5:
+            torch.backends.cuda.allow_fp16_bf16_reduction_math_sdp(True)
+    except:
+        logging.warning("Warning, could not set allow_fp16_bf16_reduction_math_sdp")

 if args.lowvram:
     set_vram_to = VRAMState.LOW_VRAM