From 3661e10648e1174ae978c6834b384b5d0331e2ce Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Sun, 29 Jan 2023 13:12:22 -0500
Subject: [PATCH] Add a command line option to disable upcasting in some cross
 attention ops.

---
 main.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/main.py b/main.py
index 209128f15..bc0af3dd4 100644
--- a/main.py
+++ b/main.py
@@ -6,6 +6,10 @@ import threading
 import queue
 import traceback
 
+if '--dont-upcast-attention' in sys.argv:
+    print("disabling upcasting of attention")
+    os.environ['ATTN_PRECISION'] = "fp16"
+
 import torch
 
 import nodes
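
For context, a minimal sketch of how an attention op might consume the ATTN_PRECISION environment variable set by this patch. The names attn_precision and scaled_dot_product here are illustrative assumptions, not the repository's actual cross attention code; the sketch only shows the general pattern of choosing between an fp32-upcast path and an fp16 path based on the variable.

import os
import torch

# Assumption for illustration: the attention code reads ATTN_PRECISION once
# and skips the fp32 upcast of q/k when it is set to "fp16".
attn_precision = os.environ.get("ATTN_PRECISION", "fp32")

def scaled_dot_product(q, k, v, scale):
    # Hypothetical helper, not ComfyUI's actual implementation.
    if attn_precision == "fp32":
        # Default path: upcast to float32 for the similarity matrix so the
        # softmax is less prone to fp16 overflow.
        sim = torch.einsum('b i d, b j d -> b i j', q.float(), k.float()) * scale
    else:
        # --dont-upcast-attention path: stay in the input dtype (e.g. fp16).
        sim = torch.einsum('b i d, b j d -> b i j', q, k) * scale
    attn = sim.softmax(dim=-1)
    return torch.einsum('b i j, b j d -> b i d', attn.to(v.dtype), v)

Launching with "python main.py --dont-upcast-attention" would then exercise the fp16 branch of such a check; without the flag, the default upcast behavior is unchanged.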