From 103c487a897bcf82cc8add4035f38ac920a7f150 Mon Sep 17 00:00:00 2001
From: comfyanonymous <comfyanonymous@protonmail.com>
Date: Sun, 2 Jul 2023 11:57:36 -0400
Subject: [PATCH] Cleanup: read attention precision from CLI args instead of an env var

Set _ATTN_PRECISION from args.dont_upcast_attention when
comfy/ldm/modules/attention.py is imported, instead of having main.py pass
the choice through the ATTN_PRECISION environment variable.
---
 comfy/ldm/modules/attention.py | 11 +++++++----
 main.py                        |  4 ----
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/comfy/ldm/modules/attention.py b/comfy/ldm/modules/attention.py
index 0c54f7f4..5f9eaa6e 100644
--- a/comfy/ldm/modules/attention.py
+++ b/comfy/ldm/modules/attention.py
@@ -16,11 +16,14 @@ if model_management.xformers_enabled():
     import xformers
     import xformers.ops
 
-# CrossAttn precision handling
-import os
-_ATTN_PRECISION = os.environ.get("ATTN_PRECISION", "fp32")
-
 from comfy.cli_args import args
+# CrossAttn precision handling
+if args.dont_upcast_attention:
+    print("disabling upcasting of attention")
+    _ATTN_PRECISION = "fp16"
+else:
+    _ATTN_PRECISION = "fp32"
+
 
 def exists(val):
     return val is not None
diff --git a/main.py b/main.py
index 22425d2a..71564997 100644
--- a/main.py
+++ b/main.py
@@ -14,10 +14,6 @@ if os.name == "nt":
     logging.getLogger("xformers").addFilter(lambda record: 'A matching Triton is not available' not in record.getMessage())
 
 if __name__ == "__main__":
-    if args.dont_upcast_attention:
-        print("disabling upcasting of attention")
-        os.environ['ATTN_PRECISION'] = "fp16"
-
     if args.cuda_device is not None:
         os.environ['CUDA_VISIBLE_DEVICES'] = str(args.cuda_device)
         print("Set cuda device to:", args.cuda_device)