Tweak lowvram memory formula.

comfyanonymous 2024-08-03 16:34:27 -04:00
parent 03c5018c98
commit 91be9c2867


@@ -450,7 +450,7 @@ def load_models_gpu(models, memory_required=0, force_patch_weights=False, minimu
         if lowvram_available and (vram_set_state == VRAMState.LOW_VRAM or vram_set_state == VRAMState.NORMAL_VRAM):
             model_size = loaded_model.model_memory_required(torch_dev)
             current_free_mem = get_free_memory(torch_dev)
-            lowvram_model_memory = max(64 * (1024 * 1024), (current_free_mem - minimum_memory_required), current_free_mem * 0.33)
+            lowvram_model_memory = max(64 * (1024 * 1024), (current_free_mem - minimum_memory_required), min(current_free_mem * 0.4, current_free_mem - minimum_inference_memory()))
             if model_size <= lowvram_model_memory: #only switch to lowvram if really necessary
                 lowvram_model_memory = 0
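In effect, the old formula's fallback term granted a flat 33% of free VRAM to model weights. The new fallback grants 40%, but that term is capped at current_free_mem - minimum_inference_memory(), so on its own it cannot eat into the headroom reserved for the inference pass. Below is a minimal standalone sketch comparing the two budget formulas; it substitutes a made-up 1 GiB constant for ComfyUI's minimum_inference_memory() (the real value depends on runtime settings), and the numbers are for illustration only.

    # Sketch of the old vs. new lowvram budget formulas (not ComfyUI code).
    MiB = 1024 * 1024
    GiB = 1024 * MiB
    MINIMUM_INFERENCE_MEMORY = 1 * GiB  # assumed stand-in for minimum_inference_memory()

    def old_budget(current_free_mem, minimum_memory_required):
        return max(64 * MiB,
                   current_free_mem - minimum_memory_required,
                   current_free_mem * 0.33)

    def new_budget(current_free_mem, minimum_memory_required):
        return max(64 * MiB,
                   current_free_mem - minimum_memory_required,
                   min(current_free_mem * 0.4,
                       current_free_mem - MINIMUM_INFERENCE_MEMORY))

    # Example: 8 GiB free, 6 GiB requested as minimum headroom.
    free, needed = 8 * GiB, 6 * GiB
    print(old_budget(free, needed) / GiB)  # 2.64 (the 0.33 * free term wins)
    print(new_budget(free, needed) / GiB)  # 3.2  (0.4 * free wins; cap of 7 GiB not hit)

In this example the budget grows from about 2.64 GiB to 3.2 GiB, so more weights can stay on the GPU, and since lowvram mode is skipped entirely when model_size fits within the budget, slightly larger models may now avoid lowvram streaming altogether.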