[refactor] move get_containerd_memory_limit above get_total_memory

This commit is contained in:
admin 2024-02-12 03:08:45 +08:00
parent 777ee90206
commit 0215f8013f

View File

@ -87,6 +87,13 @@ def get_torch_device():
else:
return torch.device(torch.cuda.current_device())
def get_containerd_memory_limit():
    """Return the container's memory limit in bytes, or 0 if no limit is found.

    Checks the cgroup v1 interface first (preserving the original behavior),
    then falls back to the cgroup v2 unified hierarchy used by modern hosts.

    Returns:
        int: the limit in bytes, or 0 when no readable numeric limit exists
        (including the cgroup v2 literal ``max``, meaning "unlimited").
    """
    # cgroup v1: file holds a plain integer byte count.
    cgroup_v1 = '/sys/fs/cgroup/memory/memory.limit_in_bytes'
    # cgroup v2: file holds an integer byte count or the string "max".
    cgroup_v2 = '/sys/fs/cgroup/memory.max'
    for path in (cgroup_v1, cgroup_v2):
        if not os.path.isfile(path):
            continue
        try:
            with open(path, 'r') as f:
                value = f.read().strip()
        except OSError:
            # Unreadable pseudo-file: treat as "no limit known" and keep looking.
            continue
        if value.isdigit():
            return int(value)
    return 0
def get_total_memory(dev=None, torch_total_too=False):
global directml_enabled
if dev is None:
@ -707,12 +714,6 @@ def is_device_mps(device):
return True
return False
def get_containerd_memory_limit():
    """Return the cgroup v1 memory limit in bytes, or 0 when unavailable.

    Reads the cgroup v1 pseudo-file if present; absence of the file
    (e.g. outside a container, or on a cgroup v2-only host) yields 0.
    """
    limit_path = '/sys/fs/cgroup/memory/memory.limit_in_bytes'
    # Guard clause: no cgroup v1 memory controller mounted -> no known limit.
    if not os.path.isfile(limit_path):
        return 0
    with open(limit_path, 'r') as handle:
        return int(handle.read())
def should_use_fp16(device=None, model_params=0, prioritize_performance=True, manual_cast=False):
global directml_enabled