Mirror of https://github.com/comfyanonymous/ComfyUI.git, synced 2025-01-25 15:55:18 +00:00
[feat] move func up
commit 0215f8013f
parent 777ee90206
@@ -87,6 +87,13 @@ def get_torch_device():
         else:
             return torch.device(torch.cuda.current_device())
 
+def get_containerd_memory_limit():
+    cgroup_memory_limit = '/sys/fs/cgroup/memory/memory.limit_in_bytes'
+    if os.path.isfile(cgroup_memory_limit):
+        with open(cgroup_memory_limit, 'r') as f:
+            return int(f.read())
+    return 0
+
 def get_total_memory(dev=None, torch_total_too=False):
     global directml_enabled
     if dev is None:
@@ -707,12 +714,6 @@ def is_device_mps(device):
             return True
     return False
 
-def get_containerd_memory_limit():
-    cgroup_memory_limit = '/sys/fs/cgroup/memory/memory.limit_in_bytes'
-    if os.path.isfile(cgroup_memory_limit):
-        with open(cgroup_memory_limit, 'r') as f:
-            return int(f.read())
-    return 0
 
 def should_use_fp16(device=None, model_params=0, prioritize_performance=True, manual_cast=False):
     global directml_enabled
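For context: the relocated get_containerd_memory_limit() reads the cgroup v1 memory limit that container runtimes such as containerd expose, returning 0 when the limit file is absent. Below is a minimal sketch of how a caller might cap the host RAM it reports with that value; the get_system_ram_capped() helper and the psutil-based total are illustrative assumptions, not part of this commit.

import os
import psutil  # assumption: psutil is available, as it commonly is alongside ComfyUI's memory checks

def get_containerd_memory_limit():
    # cgroup v1 limit file written by container runtimes such as containerd
    cgroup_memory_limit = '/sys/fs/cgroup/memory/memory.limit_in_bytes'
    if os.path.isfile(cgroup_memory_limit):
        with open(cgroup_memory_limit, 'r') as f:
            return int(f.read())
    return 0  # 0 signals "no container limit found"

def get_system_ram_capped():
    # Hypothetical caller: report the smaller of host RAM and the container limit.
    # An unlimited cgroup v1 reports a huge sentinel value, so the range check
    # falls back to host RAM in that case as well.
    host_total = psutil.virtual_memory().total
    limit = get_containerd_memory_limit()
    if 0 < limit < host_total:
        return limit
    return host_total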