From 1e0f2b232bd0b1a5dbd5c16f08e86a5d421a91aa Mon Sep 17 00:00:00 2001
From: Davemane42
Date: Tue, 28 Mar 2023 02:52:12 -0400
Subject: [PATCH 1/6] add unique_id to nodes hidden inputs

@classmethod
def INPUT_TYPES(cls):
    return {
        "hidden": {"unique_id": "UNIQUE_ID"},
    }
---
 execution.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/execution.py b/execution.py
index 3ca551db..2b26a0f7 100644
--- a/execution.py
+++ b/execution.py
@@ -10,7 +10,7 @@ import gc
 import torch
 import nodes
 
-def get_input_data(inputs, class_def, outputs={}, prompt={}, extra_data={}):
+def get_input_data(inputs, class_def, unique_id, outputs={}, prompt={}, extra_data={}):
     valid_inputs = class_def.INPUT_TYPES()
     input_data_all = {}
     for x in inputs:
@@ -34,6 +34,8 @@ def get_input_data(inputs, class_def, outputs={}, prompt={}, extra_data={}):
             if h[x] == "EXTRA_PNGINFO":
                 if "extra_pnginfo" in extra_data:
                     input_data_all[x] = extra_data['extra_pnginfo']
+            if h[x] == "UNIQUE_ID":
+                input_data_all[x] = unique_id
     return input_data_all
 
 def recursive_execute(server, prompt, outputs, current_item, extra_data={}):
@@ -55,7 +57,7 @@ def recursive_execute(server, prompt, outputs, current_item, extra_data={}):
         if input_unique_id not in outputs:
             executed += recursive_execute(server, prompt, outputs, input_unique_id, extra_data)
 
-    input_data_all = get_input_data(inputs, class_def, outputs, prompt, extra_data)
+    input_data_all = get_input_data(inputs, class_def, unique_id, outputs, prompt, extra_data)
     if server.client_id is not None:
         server.last_node_id = unique_id
         server.send_sync("executing", { "node": unique_id }, server.client_id)
@@ -96,7 +98,7 @@ def recursive_output_delete_if_changed(prompt, old_prompt, outputs, current_item):
     if unique_id in old_prompt and 'is_changed' in old_prompt[unique_id]:
         is_changed_old = old_prompt[unique_id]['is_changed']
     if 'is_changed' not in prompt[unique_id]:
-        input_data_all = get_input_data(inputs, class_def, outputs)
+        input_data_all = get_input_data(inputs, class_def, unique_id, outputs)
         if input_data_all is not None:
            is_changed = class_def.IS_CHANGED(**input_data_all)
            prompt[unique_id]['is_changed'] = is_changed
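
Note on [PATCH 1/6]: with this change a node class can ask the executor for its own id in the prompt graph by declaring a "UNIQUE_ID" hidden input, exactly as the commit message shows; get_input_data() then injects the id alongside the node's regular inputs. A minimal sketch of a custom node that consumes it follows; the class name, category, and print behavior are illustrative only, not part of the patch:

    class PrintNodeID:
        @classmethod
        def INPUT_TYPES(cls):
            return {
                "required": {},
                "hidden": {"unique_id": "UNIQUE_ID"},
            }

        RETURN_TYPES = ()
        FUNCTION = "run"
        OUTPUT_NODE = True
        CATEGORY = "utils"  # hypothetical category for this sketch

        def run(self, unique_id):
            # unique_id is this node's id key from the submitted prompt,
            # filled in by the get_input_data() change above
            print(f"running node {unique_id}")
            return ()
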
$el("button", { textContent: "Load Default", onclick: () => { + app.clean(); + app.loadGraphData(); + }}), ]); dragElement(this.menuContainer); From 40a377775e7e383c09418297637a0cc261ead96d Mon Sep 17 00:00:00 2001 From: Farid Safi Date: Tue, 28 Mar 2023 20:22:49 +0200 Subject: [PATCH 3/6] move clean to handleFile and loadGraphData functions --- web/scripts/app.js | 2 ++ web/scripts/ui.js | 6 +----- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/web/scripts/app.js b/web/scripts/app.js index ddb829ab..cd7fb5d1 100644 --- a/web/scripts/app.js +++ b/web/scripts/app.js @@ -721,6 +721,8 @@ class ComfyApp { * @param {*} graphData A serialized graph object */ loadGraphData(graphData) { + this.clean(); + if (!graphData) { graphData = defaultGraph; } diff --git a/web/scripts/ui.js b/web/scripts/ui.js index 7e73c108..2aabd29e 100644 --- a/web/scripts/ui.js +++ b/web/scripts/ui.js @@ -306,7 +306,6 @@ export class ComfyUI { style: { display: "none" }, parent: document.body, onchange: () => { - app.clean(); app.handleFile(fileInput.files[0]); }, }); @@ -393,10 +392,7 @@ export class ComfyUI { app.clean(); app.graph.clear(); }}), - $el("button", { textContent: "Load Default", onclick: () => { - app.clean(); - app.loadGraphData(); - }}), + $el("button", { textContent: "Load Default", onclick: () => app.loadGraphData() }), ]); dragElement(this.menuContainer); From 0d65cb17b77ff80f76a6fe8181860c1694158645 Mon Sep 17 00:00:00 2001 From: comfyanonymous Date: Tue, 28 Mar 2023 16:29:35 -0400 Subject: [PATCH 4/6] Fix ddim_uniform crashing with 37 steps. --- comfy/samplers.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/comfy/samplers.py b/comfy/samplers.py index 17201d9d..4f61a846 100644 --- a/comfy/samplers.py +++ b/comfy/samplers.py @@ -242,7 +242,10 @@ def ddim_scheduler(model, steps): sigs = [] ddim_timesteps = make_ddim_timesteps(ddim_discr_method="uniform", num_ddim_timesteps=steps, num_ddpm_timesteps=model.inner_model.inner_model.num_timesteps, verbose=False) for x in range(len(ddim_timesteps) - 1, -1, -1): - sigs.append(model.t_to_sigma(torch.tensor(ddim_timesteps[x]))) + ts = ddim_timesteps[x] + if ts > 999: + ts = 999 + sigs.append(model.t_to_sigma(torch.tensor(ts))) sigs += [0.0] return torch.FloatTensor(sigs) @@ -373,7 +376,7 @@ class KSampler: def set_steps(self, steps, denoise=None): self.steps = steps - if denoise is None: + if denoise is None or denoise > 0.9999: self.sigmas = self._calculate_sigmas(steps) else: new_steps = int(steps/denoise) From 3ed814b01fcad1cf95bfd3fde4c18d0ae8bc9b13 Mon Sep 17 00:00:00 2001 From: comfyanonymous Date: Tue, 28 Mar 2023 23:58:27 -0400 Subject: [PATCH 5/6] Fix colab. --- notebooks/comfyui_colab.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebooks/comfyui_colab.ipynb b/notebooks/comfyui_colab.ipynb index 5108ec83..276579c9 100644 --- a/notebooks/comfyui_colab.ipynb +++ b/notebooks/comfyui_colab.ipynb @@ -47,7 +47,7 @@ " !git pull\n", "\n", "!echo -= Install dependencies =-\n", - "!pip -q install xformers -r requirements.txt" + "!pip -q install xformers==0.0.16 -r requirements.txt" ] }, { From b2554bc4dd69c3f5ad965edbc5585c4ff4948458 Mon Sep 17 00:00:00 2001 From: comfyanonymous Date: Wed, 29 Mar 2023 02:24:37 -0400 Subject: [PATCH 6/6] Split VAE decode batches depending on free memory. 
From 3ed814b01fcad1cf95bfd3fde4c18d0ae8bc9b13 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Tue, 28 Mar 2023 23:58:27 -0400
Subject: [PATCH 5/6] Fix colab.

---
 notebooks/comfyui_colab.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/notebooks/comfyui_colab.ipynb b/notebooks/comfyui_colab.ipynb
index 5108ec83..276579c9 100644
--- a/notebooks/comfyui_colab.ipynb
+++ b/notebooks/comfyui_colab.ipynb
@@ -47,7 +47,7 @@
     "  !git pull\n",
     "\n",
     "!echo -= Install dependencies =-\n",
-    "!pip -q install xformers -r requirements.txt"
+    "!pip -q install xformers==0.0.16 -r requirements.txt"
    ]
   },
   {

From b2554bc4dd69c3f5ad965edbc5585c4ff4948458 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Wed, 29 Mar 2023 02:24:37 -0400
Subject: [PATCH 6/6] Split VAE decode batches depending on free memory.

---
 comfy/sd.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/comfy/sd.py b/comfy/sd.py
index d767d867..2e1ae840 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -439,9 +439,14 @@ class VAE:
         model_management.unload_model()
         self.first_stage_model = self.first_stage_model.to(self.device)
         try:
-            samples = samples_in.to(self.device)
-            pixel_samples = self.first_stage_model.decode(1. / self.scale_factor * samples)
-            pixel_samples = torch.clamp((pixel_samples + 1.0) / 2.0, min=0.0, max=1.0)
+            free_memory = model_management.get_free_memory(self.device)
+            batch_number = int((free_memory * 0.7) / (2562 * samples_in.shape[2] * samples_in.shape[3] * 64))
+            batch_number = max(1, batch_number)
+
+            pixel_samples = torch.empty((samples_in.shape[0], 3, round(samples_in.shape[2] * 8), round(samples_in.shape[3] * 8)), device="cpu")
+            for x in range(0, samples_in.shape[0], batch_number):
+                samples = samples_in[x:x+batch_number].to(self.device)
+                pixel_samples[x:x+batch_number] = torch.clamp((self.first_stage_model.decode(1. / self.scale_factor * samples) + 1.0) / 2.0, min=0.0, max=1.0).cpu()
         except model_management.OOM_EXCEPTION as e:
             print("Warning: Ran out of memory when regular VAE decoding, retrying with tiled VAE decoding.")
             pixel_samples = self.decode_tiled_(samples_in)
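
Note on [PATCH 6/6]: the heuristic takes 70% of the device's free memory and divides it by an empirical per-image decode cost (2562 * latent_height * latent_width * 64) to decide how many latents fit through the VAE decoder at once; the loop then decodes samples_in in slices of that size and accumulates the pixel output on the CPU, so peak VRAM no longer grows with the full batch, with tiled decoding still kept as the OOM fallback. A sketch of the same arithmetic with made-up numbers (the 8 GiB free-memory figure is hypothetical):

    free_memory = 8 * 1024**3                      # hypothetical: 8 GiB free on the device
    latent_h, latent_w = 64, 64                    # a 512x512 image decodes from a 64x64 latent
    per_image = 2562 * latent_h * latent_w * 64    # empirical cost constant used by the patch
    batch_number = max(1, int((free_memory * 0.7) / per_image))
    print(batch_number)                            # 8 -> decode at most 8 latents per pass
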