From 4a8a839b40fcae9960a6107200b89dce6675895d Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Sat, 11 Nov 2023 01:03:39 -0500
Subject: [PATCH] Add option to use in place weight updating in ModelPatcher.

---
 comfy/model_patcher.py | 21 ++++++++++++++++-----
 comfy/utils.py         |  8 ++++++++
 2 files changed, 24 insertions(+), 5 deletions(-)

diff --git a/comfy/model_patcher.py b/comfy/model_patcher.py
index ef18d1b23..6d7a61c41 100644
--- a/comfy/model_patcher.py
+++ b/comfy/model_patcher.py
@@ -6,7 +6,7 @@ import comfy.utils
 import comfy.model_management
 
 class ModelPatcher:
-    def __init__(self, model, load_device, offload_device, size=0, current_device=None):
+    def __init__(self, model, load_device, offload_device, size=0, current_device=None, weight_inplace_update=False):
         self.size = size
         self.model = model
         self.patches = {}
@@ -22,6 +22,8 @@ class ModelPatcher:
         else:
             self.current_device = current_device
 
+        self.weight_inplace_update = weight_inplace_update
+
     def model_size(self):
         if self.size > 0:
             return self.size
@@ -171,15 +173,20 @@ class ModelPatcher:
 
             weight = model_sd[key]
 
+            inplace_update = self.weight_inplace_update
+
             if key not in self.backup:
-                self.backup[key] = weight.to(self.offload_device)
+                self.backup[key] = weight.to(device=self.offload_device, copy=inplace_update)
 
             if device_to is not None:
                 temp_weight = comfy.model_management.cast_to_device(weight, device_to, torch.float32, copy=True)
             else:
                 temp_weight = weight.to(torch.float32, copy=True)
             out_weight = self.calculate_weight(self.patches[key], temp_weight, key).to(weight.dtype)
-            comfy.utils.set_attr(self.model, key, out_weight)
+            if inplace_update:
+                comfy.utils.copy_to_param(self.model, key, out_weight)
+            else:
+                comfy.utils.set_attr(self.model, key, out_weight)
             del temp_weight
 
             if device_to is not None:
@@ -295,8 +302,12 @@ class ModelPatcher:
     def unpatch_model(self, device_to=None):
         keys = list(self.backup.keys())
 
-        for k in keys:
-            comfy.utils.set_attr(self.model, k, self.backup[k])
+        if self.weight_inplace_update:
+            for k in keys:
+                comfy.utils.copy_to_param(self.model, k, self.backup[k])
+        else:
+            for k in keys:
+                comfy.utils.set_attr(self.model, k, self.backup[k])
 
         self.backup = {}
 
diff --git a/comfy/utils.py b/comfy/utils.py
index 6a0c54e80..4b484d07a 100644
--- a/comfy/utils.py
+++ b/comfy/utils.py
@@ -261,6 +261,14 @@ def set_attr(obj, attr, value):
     setattr(obj, attrs[-1], torch.nn.Parameter(value))
     del prev
 
+def copy_to_param(obj, attr, value):
+    # inplace update tensor instead of replacing it
+    attrs = attr.split(".")
+    for name in attrs[:-1]:
+        obj = getattr(obj, name)
+    prev = getattr(obj, attrs[-1])
+    prev.data.copy_(value)
+
 def get_attr(obj, attr):
     attrs = attr.split(".")
     for name in attrs:
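
The difference between the two update paths, and why the backup line now passes copy=inplace_update, can be seen in a minimal self-contained sketch. This is not part of the patch: the toy model, the key "0.weight", and the patch_weight() wrapper are illustrative stand-ins for ModelPatcher.patch_model; only the bodies of set_attr/copy_to_param mirror the helpers in comfy/utils.py above.

import torch

def set_attr(obj, attr, value):
    # Replacement-style update (mirrors comfy.utils.set_attr): bind a brand-new Parameter.
    attrs = attr.split(".")
    for name in attrs[:-1]:
        obj = getattr(obj, name)
    prev = getattr(obj, attrs[-1])
    setattr(obj, attrs[-1], torch.nn.Parameter(value))
    del prev

def copy_to_param(obj, attr, value):
    # In-place update (mirrors comfy.utils.copy_to_param): keep the Parameter, overwrite its data.
    attrs = attr.split(".")
    for name in attrs[:-1]:
        obj = getattr(obj, name)
    prev = getattr(obj, attrs[-1])
    prev.data.copy_(value)

def patch_weight(model, key, new_weight, inplace_update):
    # Simplified stand-in for the backup + update logic in ModelPatcher.patch_model.
    weight = model.state_dict()[key]
    # The in-place path overwrites the original tensor, so the backup must be a real copy;
    # the replacement path leaves the old Parameter untouched, so it can be kept as-is.
    backup = weight.to(device="cpu", copy=inplace_update)
    if inplace_update:
        copy_to_param(model, key, new_weight)
    else:
        set_attr(model, key, new_weight)
    return backup

model = torch.nn.Sequential(torch.nn.Linear(4, 4))        # toy stand-in for a real model
param_before = model[0].weight
new_weight = param_before.detach() * 2.0                   # pretend this is the patched weight
backup = patch_weight(model, "0.weight", new_weight, inplace_update=True)
assert model[0].weight is param_before                     # same Parameter object, new values
assert torch.equal(model[0].weight.detach(), backup * 2)   # backup still holds the old values

The visible trade-off sits in the backup line: with in-place updates the original data is overwritten, so backing it up costs an extra copy, while in exchange the module keeps the very same Parameter objects, so outside references to them remain valid and unpatch_model can restore the old values through copy_to_param as well.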