Automatically enable lowvram mode if vram is less than 4GB.

Use: --normalvram to disable it.
This commit is contained in:
comfyanonymous 2023-02-10 00:47:56 -05:00
parent e9d3ac2ba0
commit 7e1e193f39
2 changed files with 12 additions and 5 deletions

View File

@@ -13,16 +13,22 @@ total_vram_available_mb = -1
# Decide the VRAM strategy: auto-detect low-VRAM GPUs, then let explicit
# command line flags override the detected value.
import sys

set_vram_to = NORMAL_VRAM
try:
    import torch
    # mem_get_info() returns (free_bytes, total_bytes); convert total to MiB.
    total_vram = torch.cuda.mem_get_info(torch.cuda.current_device())[1] / (1024 * 1024)
    # GPUs with 4GB or less default to LOW_VRAM unless the user opts out.
    if total_vram <= 4096 and "--normalvram" not in sys.argv:
        print("Trying to enable lowvram mode because your GPU seems to have 4GB or less. If you don't want this use: --normalvram")
        set_vram_to = LOW_VRAM
except Exception:
    # Best-effort probe: CUDA may be missing or uninitialized (e.g. CPU-only
    # install) — keep the NORMAL_VRAM default rather than crash at import time.
    pass

# Explicit flags always win over the automatic detection above.
if "--lowvram" in sys.argv:
    set_vram_to = LOW_VRAM
if "--novram" in sys.argv:
    set_vram_to = NO_VRAM
if set_vram_to != NORMAL_VRAM:
try:

View File

@@ -14,6 +14,7 @@ if __name__ == "__main__":
# Emit the help text for the attention and VRAM command line switches,
# one line per option (an empty entry produces the blank separator line).
for _usage_msg in (
    "\t--dont-upcast-attention\t\tDisable upcasting of attention \n\t\t\t\t\tcan boost speed but increase the chances of black images.\n",
    "\t--use-split-cross-attention\tUse the split cross attention optimization instead of the sub-quadratic one.\n\t\t\t\t\tIgnored when xformers is used.",
    "",
    "\t--normalvram\t\t\tUsed to force normal vram use if lowvram gets automatically enabled.",
    "\t--lowvram\t\t\tSplit the unet in parts to use less vram.",
    "\t--novram\t\t\tWhen lowvram isn't enough.",
    "",
):
    print(_usage_msg)