This commit is contained in:
jiangxuefeng 2025-04-11 09:46:27 -04:00 committed by GitHub
commit 8915a96c93
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 20 additions and 6 deletions

View File

@ -138,6 +138,7 @@ class PerformanceFeature(enum.Enum):
Fp8MatrixMultiplication = "fp8_matrix_mult" Fp8MatrixMultiplication = "fp8_matrix_mult"
parser.add_argument("--fast", nargs="*", type=PerformanceFeature, help="Enable some untested and potentially quality deteriorating optimizations. --fast with no arguments enables everything. You can pass a list specific optimizations if you only want to enable specific ones. Current valid optimizations: fp16_accumulation fp8_matrix_mult") parser.add_argument("--fast", nargs="*", type=PerformanceFeature, help="Enable some untested and potentially quality deteriorating optimizations. --fast with no arguments enables everything. You can pass a list specific optimizations if you only want to enable specific ones. Current valid optimizations: fp16_accumulation fp8_matrix_mult")
parser.add_argument("--disable-mmap", action="store_true", help="Disable memory-mapped (mmap) loading when reading .safetensors or .sft model files; the whole file is read into memory instead.")
parser.add_argument("--dont-print-server", action="store_true", help="Don't print server output.") parser.add_argument("--dont-print-server", action="store_true", help="Don't print server output.")
parser.add_argument("--quick-test-for-ci", action="store_true", help="Quick test for CI.") parser.add_argument("--quick-test-for-ci", action="store_true", help="Quick test for CI.")

View File

@ -28,6 +28,7 @@ import logging
import itertools import itertools
from torch.nn.functional import interpolate from torch.nn.functional import interpolate
from einops import rearrange from einops import rearrange
from comfy.cli_args import args
ALWAYS_SAFE_LOAD = False ALWAYS_SAFE_LOAD = False
if hasattr(torch.serialization, "add_safe_globals"): # TODO: this was added in pytorch 2.4, the unsafe path should be removed once earlier versions are deprecated if hasattr(torch.serialization, "add_safe_globals"): # TODO: this was added in pytorch 2.4, the unsafe path should be removed once earlier versions are deprecated
@ -46,18 +47,30 @@ if hasattr(torch.serialization, "add_safe_globals"): # TODO: this was added in
else: else:
logging.info("Warning, you are using an old pytorch version and some ckpt/pt files might be loaded unsafely. Upgrading to 2.4 or above is recommended.") logging.info("Warning, you are using an old pytorch version and some ckpt/pt files might be loaded unsafely. Upgrading to 2.4 or above is recommended.")
def load_torch_file(ckpt, safe_load=False, device=None, return_metadata=False): def load_torch_file(ckpt, safe_load=False, device=None, return_metadata=False, disable_mmap=None):
if device is None: if device is None:
device = torch.device("cpu") device = torch.device("cpu")
metadata = None metadata = None
if ckpt.lower().endswith(".safetensors") or ckpt.lower().endswith(".sft"): if ckpt.lower().endswith(".safetensors") or ckpt.lower().endswith(".sft"):
try: try:
with safetensors.safe_open(ckpt, framework="pt", device=device.type) as f: if disable_mmap is None:
sd = {} disable_mmap_decision = args.disable_mmap
for k in f.keys(): else:
sd[k] = f.get_tensor(k) disable_mmap_decision = True
if disable_mmap_decision:
pl_sd = safetensors.torch.load(open(ckpt, 'rb').read())
sd = {k: v.to(device) for k, v in pl_sd.items()}
if return_metadata: if return_metadata:
metadata = f.metadata() with safetensors.safe_open(ckpt, framework="pt", device=device.type) as f:
metadata = f.metadata()
else:
with safetensors.safe_open(ckpt, framework="pt", device=device.type) as f:
sd = {}
for k in f.keys():
sd[k] = f.get_tensor(k)
if return_metadata:
metadata = f.metadata()
except Exception as e: except Exception as e:
if len(e.args) > 0: if len(e.args) > 0:
message = e.args[0] message = e.args[0]