Merge branch 'master' into multigpu_support

Jedrzej Kosinski 2025-01-11 20:16:42 -06:00
commit 8d4b50158e
9 changed files with 294 additions and 36 deletions

.github/workflows/update-version.yml (new file)
@@ -0,0 +1,58 @@
name: Update Version File
on:
pull_request:
paths:
- "pyproject.toml"
branches:
- master
jobs:
update-version:
runs-on: ubuntu-latest
# Don't run on fork PRs
if: github.event.pull_request.head.repo.full_name == github.repository
permissions:
pull-requests: write
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- name: Update comfyui_version.py
run: |
# Read version from pyproject.toml and update comfyui_version.py
python -c '
import tomllib
# Read version from pyproject.toml
with open("pyproject.toml", "rb") as f:
config = tomllib.load(f)
version = config["project"]["version"]
# Write version to comfyui_version.py
with open("comfyui_version.py", "w") as f:
f.write("# This file is automatically generated by the build process when version is\n")
f.write("# updated in pyproject.toml.\n")
f.write(f"__version__ = \"{version}\"\n")
'
- name: Commit changes
run: |
git config --local user.name "github-actions"
git config --local user.email "github-actions@github.com"
git fetch origin ${{ github.head_ref }}
git checkout -B ${{ github.head_ref }} origin/${{ github.head_ref }}
git add comfyui_version.py
git diff --quiet && git diff --staged --quiet || git commit -m "chore: Update comfyui_version.py to match pyproject.toml"
git push origin HEAD:${{ github.head_ref }}
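As a quick local sanity check of the sync this workflow performs, a minimal sketch (assuming Python 3.11+ for tomllib and a ComfyUI checkout as the working directory) that compares the two version sources directly:

# Not part of the workflow; run manually from the repository root.
import tomllib
import comfyui_version

with open("pyproject.toml", "rb") as f:
    pyproject_version = tomllib.load(f)["project"]["version"]

assert comfyui_version.__version__ == pyproject_version, (
    f"version mismatch: {comfyui_version.__version__} vs {pyproject_version}"
)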


@@ -386,3 +386,24 @@ class HunyuanVideo(LatentFormat):
class Cosmos1CV8x8x8(LatentFormat):
latent_channels = 16
latent_dimensions = 3
latent_rgb_factors = [
[ 0.1817, 0.2284, 0.2423],
[-0.0586, -0.0862, -0.3108],
[-0.4703, -0.4255, -0.3995],
[ 0.0803, 0.1963, 0.1001],
[-0.0820, -0.1050, 0.0400],
[ 0.2511, 0.3098, 0.2787],
[-0.1830, -0.2117, -0.0040],
[-0.0621, -0.2187, -0.0939],
[ 0.3619, 0.1082, 0.1455],
[ 0.3164, 0.3922, 0.2575],
[ 0.1152, 0.0231, -0.0462],
[-0.1434, -0.3609, -0.3665],
[ 0.0635, 0.1471, 0.1680],
[-0.3635, -0.1963, -0.3248],
[-0.1865, 0.0365, 0.2346],
[ 0.0447, 0.0994, 0.0881]
]
latent_rgb_factors_bias = [-0.1223, -0.1889, -0.1976]
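For context, a minimal sketch of how a channels-by-3 factor matrix plus bias like the one above is typically used to build an RGB preview from a latent. This is an illustration, not the exact ComfyUI preview code, and it assumes a plain [C, H, W] latent for simplicity even though latent_dimensions = 3 here:

import torch

def latent_to_rgb_preview(latent, rgb_factors, rgb_factors_bias):
    # latent: [C, H, W]; rgb_factors: C rows of 3 entries; rgb_factors_bias: 3 entries.
    factors = torch.tensor(rgb_factors, dtype=latent.dtype)    # [C, 3]
    bias = torch.tensor(rgb_factors_bias, dtype=latent.dtype)  # [3]
    rgb = torch.einsum("chw,cr->rhw", latent, factors) + bias[:, None, None]
    # Map roughly [-1, 1] values to displayable 0..255 bytes.
    return ((rgb.clamp(-1, 1) + 1.0) * 127.5).to(torch.uint8)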


@@ -189,7 +189,7 @@ class ModelSamplingContinuousEDM:
@classmethod
def INPUT_TYPES(s):
return {"required": { "model": ("MODEL",),
"sampling": (["v_prediction", "edm_playground_v2.5", "eps"],),
"sampling": (["v_prediction", "edm", "edm_playground_v2.5", "eps"],),
"sigma_max": ("FLOAT", {"default": 120.0, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}),
"sigma_min": ("FLOAT", {"default": 0.002, "min": 0.0, "max": 1000.0, "step":0.001, "round": False}),
}}
@@ -206,6 +206,9 @@ class ModelSamplingContinuousEDM:
sigma_data = 1.0
if sampling == "eps":
sampling_type = comfy.model_sampling.EPS
elif sampling == "edm":
sampling_type = comfy.model_sampling.EDM
sigma_data = 0.5
elif sampling == "v_prediction":
sampling_type = comfy.model_sampling.V_PREDICTION
elif sampling == "edm_playground_v2.5":

comfyui_version.py (new file)

@@ -0,0 +1,3 @@
# This file is automatically generated by the build process when version is
# updated in pyproject.toml.
__version__ = "0.3.10"

pyproject.toml (new file)

@@ -0,0 +1,23 @@
[project]
name = "ComfyUI"
version = "0.3.10"
readme = "README.md"
license = { file = "LICENSE" }
requires-python = ">=3.9"
[project.urls]
homepage = "https://www.comfy.org/"
repository = "https://github.com/comfyanonymous/ComfyUI"
documentation = "https://docs.comfy.org/"
[tool.ruff]
lint.select = [
"S307", # suspicious-eval-usage
"S102", # exec
"T", # print-usage
"W",
# The "F" series in Ruff stands for "Pyflakes" rules, which catch various Python syntax errors and undefined names.
# See all rules here: https://docs.astral.sh/ruff/rules/#pyflakes-f
"F",
]
exclude = ["*.ipynb"]


@@ -1,15 +0,0 @@
# Disable all rules by default
lint.ignore = ["ALL"]
# Enable specific rules
lint.select = [
"S307", # suspicious-eval-usage
"S102", # exec
"T", # print-usage
"W",
# The "F" series in Ruff stands for "Pyflakes" rules, which catch various Python syntax errors and undefined names.
# See all rules here: https://docs.astral.sh/ruff/rules/#pyflakes-f
"F",
]
exclude = ["*.ipynb"]
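The rules removed with this file are the same ones now declared under [tool.ruff] in pyproject.toml above. A small sketch (Python 3.11+ for tomllib) to confirm where ruff's configuration now lives; note that dotted keys such as lint.select become nested tables when parsed:

import tomllib

with open("pyproject.toml", "rb") as f:
    ruff_cfg = tomllib.load(f)["tool"]["ruff"]

assert ruff_cfg["lint"]["select"] == ["S307", "S102", "T", "W", "F"]
assert ruff_cfg["exclude"] == ["*.ipynb"]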


@@ -27,6 +27,7 @@ from comfy.cli_args import args
import comfy.utils
import comfy.model_management
import node_helpers
from comfyui_version import __version__
from app.frontend_management import FrontendManager
from app.user_manager import UserManager
from app.model_manager import ModelFileManager
@@ -44,21 +45,6 @@ async def send_socket_catch_exception(function, message):
except (aiohttp.ClientError, aiohttp.ClientPayloadError, ConnectionResetError, BrokenPipeError, ConnectionError) as err:
logging.warning("send error: {}".format(err))
def get_comfyui_version():
comfyui_version = "unknown"
repo_path = os.path.dirname(os.path.realpath(__file__))
try:
import pygit2
repo = pygit2.Repository(repo_path)
comfyui_version = repo.describe(describe_strategy=pygit2.GIT_DESCRIBE_TAGS)
except Exception:
try:
import subprocess
comfyui_version = subprocess.check_output(["git", "describe", "--tags"], cwd=repo_path).decode('utf-8')
except Exception as e:
logging.warning(f"Failed to get ComfyUI version: {e}")
return comfyui_version.strip()
@web.middleware
async def cache_control(request: web.Request, handler):
response: web.Response = await handler(request)
@@ -518,7 +504,7 @@ class PromptServer():
"os": os.name,
"ram_total": ram_total,
"ram_free": ram_free,
"comfyui_version": get_comfyui_version(),
"comfyui_version": __version__,
"python_version": sys.version,
"pytorch_version": comfy.model_management.torch_version,
"embedded_python": os.path.split(os.path.split(sys.executable)[0])[1] == "python_embeded",


@@ -1,11 +1,22 @@
import pytest
import yaml
import os
import sys
from unittest.mock import Mock, patch, mock_open
from utils.extra_config import load_extra_path_config
import folder_paths
@pytest.fixture()
def clear_folder_paths():
# Clear the global dictionary before each test to ensure isolation
original = folder_paths.folder_names_and_paths.copy()
folder_paths.folder_names_and_paths.clear()
yield
folder_paths.folder_names_and_paths = original
@pytest.fixture
def mock_yaml_content():
return {
@@ -15,10 +26,12 @@ def mock_yaml_content():
}
}
@pytest.fixture
def mock_expanded_home():
return '/home/user'
@pytest.fixture
def yaml_config_with_appdata():
return """
@@ -27,20 +40,33 @@ def yaml_config_with_appdata():
checkpoints: 'models/checkpoints'
"""
@pytest.fixture
def mock_yaml_content_appdata(yaml_config_with_appdata):
return yaml.safe_load(yaml_config_with_appdata)
@pytest.fixture
def mock_expandvars_appdata():
mock = Mock()
mock.side_effect = lambda path: path.replace('%APPDATA%', 'C:/Users/TestUser/AppData/Roaming')
def expandvars(path):
if '%APPDATA%' in path:
if sys.platform == 'win32':
return path.replace('%APPDATA%', 'C:/Users/TestUser/AppData/Roaming')
else:
return path.replace('%APPDATA%', '/Users/TestUser/AppData/Roaming')
return path
mock.side_effect = expandvars
return mock
@pytest.fixture
def mock_add_model_folder_path():
return Mock()
@pytest.fixture
def mock_expanduser(mock_expanded_home):
def _expanduser(path):
@@ -49,10 +75,12 @@ def mock_expanduser(mock_expanded_home):
return path
return _expanduser
@pytest.fixture
def mock_yaml_safe_load(mock_yaml_content):
return Mock(return_value=mock_yaml_content)
@patch('builtins.open', new_callable=mock_open, read_data="dummy file content")
def test_load_extra_model_paths_expands_userpath(
mock_file,
@@ -88,6 +116,7 @@ def test_load_extra_model_paths_expands_userpath(
# Check if open was called with the correct file path
mock_file.assert_called_once_with(dummy_yaml_file_name, 'r')
@patch('builtins.open', new_callable=mock_open)
def test_load_extra_model_paths_expands_appdata(
mock_file,
@@ -111,7 +140,10 @@ def test_load_extra_model_paths_expands_appdata(
dummy_yaml_file_name = 'dummy_path.yaml'
load_extra_path_config(dummy_yaml_file_name)
expected_base_path = 'C:/Users/TestUser/AppData/Roaming/ComfyUI'
if sys.platform == "win32":
expected_base_path = 'C:/Users/TestUser/AppData/Roaming/ComfyUI'
else:
expected_base_path = '/Users/TestUser/AppData/Roaming/ComfyUI'
expected_calls = [
('checkpoints', os.path.join(expected_base_path, 'models/checkpoints'), False),
]
@@ -124,3 +156,148 @@ def test_load_extra_model_paths_expands_appdata(
# Verify that expandvars was called
assert mock_expandvars_appdata.called
@patch("builtins.open", new_callable=mock_open, read_data="dummy yaml content")
@patch("yaml.safe_load")
def test_load_extra_path_config_relative_base_path(
mock_yaml_load, _mock_file, clear_folder_paths, monkeypatch, tmp_path
):
"""
Test that when 'base_path' is a relative path in the YAML, it is joined to the YAML file directory, and then
the items in the config are correctly converted to absolute paths.
"""
sub_folder = "./my_rel_base"
config_data = {
"some_model_folder": {
"base_path": sub_folder,
"is_default": True,
"checkpoints": "checkpoints",
"some_key": "some_value"
}
}
mock_yaml_load.return_value = config_data
dummy_yaml_name = "dummy_file.yaml"
def fake_abspath(path):
if path == dummy_yaml_name:
# If it's the YAML path, treat it like it lives in tmp_path
return os.path.join(str(tmp_path), dummy_yaml_name)
return os.path.join(str(tmp_path), path) # Otherwise, do a normal join relative to tmp_path
def fake_dirname(path):
# We expect path to be the result of fake_abspath(dummy_yaml_name)
if path.endswith(dummy_yaml_name):
return str(tmp_path)
return os.path.dirname(path)
monkeypatch.setattr(os.path, "abspath", fake_abspath)
monkeypatch.setattr(os.path, "dirname", fake_dirname)
load_extra_path_config(dummy_yaml_name)
expected_checkpoints = os.path.abspath(os.path.join(str(tmp_path), sub_folder, "checkpoints"))
expected_some_value = os.path.abspath(os.path.join(str(tmp_path), sub_folder, "some_value"))
actual_paths = folder_paths.folder_names_and_paths["checkpoints"][0]
assert len(actual_paths) == 1, "Should have one path added for 'checkpoints'."
assert actual_paths[0] == expected_checkpoints
actual_paths = folder_paths.folder_names_and_paths["some_key"][0]
assert len(actual_paths) == 1, "Should have one path added for 'some_key'."
assert actual_paths[0] == expected_some_value
@patch("builtins.open", new_callable=mock_open, read_data="dummy yaml content")
@patch("yaml.safe_load")
def test_load_extra_path_config_absolute_base_path(
mock_yaml_load, _mock_file, clear_folder_paths, monkeypatch, tmp_path
):
"""
Test that when 'base_path' is an absolute path, each subdirectory is joined with that absolute path,
rather than being relative to the YAML's directory.
"""
abs_base = os.path.join(str(tmp_path), "abs_base")
config_data = {
"some_absolute_folder": {
"base_path": abs_base, # <-- absolute
"is_default": True,
"loras": "loras_folder",
"embeddings": "embeddings_folder"
}
}
mock_yaml_load.return_value = config_data
dummy_yaml_name = "dummy_abs.yaml"
def fake_abspath(path):
if path == dummy_yaml_name:
# If it's the YAML path, treat it like it is in tmp_path
return os.path.join(str(tmp_path), dummy_yaml_name)
return path # For absolute base, we just return path directly
def fake_dirname(path):
return str(tmp_path) if path.endswith(dummy_yaml_name) else os.path.dirname(path)
monkeypatch.setattr(os.path, "abspath", fake_abspath)
monkeypatch.setattr(os.path, "dirname", fake_dirname)
load_extra_path_config(dummy_yaml_name)
# Expect the final paths to be <abs_base>/loras_folder and <abs_base>/embeddings_folder
expected_loras = os.path.join(abs_base, "loras_folder")
expected_embeddings = os.path.join(abs_base, "embeddings_folder")
actual_loras = folder_paths.folder_names_and_paths["loras"][0]
assert len(actual_loras) == 1, "Should have one path for 'loras'."
assert actual_loras[0] == os.path.abspath(expected_loras)
actual_embeddings = folder_paths.folder_names_and_paths["embeddings"][0]
assert len(actual_embeddings) == 1, "Should have one path for 'embeddings'."
assert actual_embeddings[0] == os.path.abspath(expected_embeddings)
@patch("builtins.open", new_callable=mock_open, read_data="dummy yaml content")
@patch("yaml.safe_load")
def test_load_extra_path_config_no_base_path(
mock_yaml_load, _mock_file, clear_folder_paths, monkeypatch, tmp_path
):
"""
Test that if 'base_path' is not present, each path is joined
with the directory of the YAML file (unless it's already absolute).
"""
config_data = {
"some_folder_without_base": {
"is_default": True,
"text_encoders": "clip",
"diffusion_models": "unet"
}
}
mock_yaml_load.return_value = config_data
dummy_yaml_name = "dummy_no_base.yaml"
def fake_abspath(path):
if path == dummy_yaml_name:
return os.path.join(str(tmp_path), dummy_yaml_name)
return os.path.join(str(tmp_path), path)
def fake_dirname(path):
return str(tmp_path) if path.endswith(dummy_yaml_name) else os.path.dirname(path)
monkeypatch.setattr(os.path, "abspath", fake_abspath)
monkeypatch.setattr(os.path, "dirname", fake_dirname)
load_extra_path_config(dummy_yaml_name)
expected_clip = os.path.join(str(tmp_path), "clip")
expected_unet = os.path.join(str(tmp_path), "unet")
actual_text_encoders = folder_paths.folder_names_and_paths["text_encoders"][0]
assert len(actual_text_encoders) == 1, "Should have one path for 'text_encoders'."
assert actual_text_encoders[0] == os.path.abspath(expected_clip)
actual_diffusion = folder_paths.folder_names_and_paths["diffusion_models"][0]
assert len(actual_diffusion) == 1, "Should have one path for 'diffusion_models'."
assert actual_diffusion[0] == os.path.abspath(expected_unet)


@@ -6,6 +6,7 @@ import logging
def load_extra_path_config(yaml_path):
with open(yaml_path, 'r') as stream:
config = yaml.safe_load(stream)
yaml_dir = os.path.dirname(os.path.abspath(yaml_path))
for c in config:
conf = config[c]
if conf is None:
@@ -14,6 +15,8 @@ def load_extra_path_config(yaml_path):
if "base_path" in conf:
base_path = conf.pop("base_path")
base_path = os.path.expandvars(os.path.expanduser(base_path))
if not os.path.isabs(base_path):
base_path = os.path.abspath(os.path.join(yaml_dir, base_path))
is_default = False
if "is_default" in conf:
is_default = conf.pop("is_default")
@@ -22,10 +25,9 @@ def load_extra_path_config(yaml_path):
if len(y) == 0:
continue
full_path = y
if base_path is not None:
if base_path:
full_path = os.path.join(base_path, full_path)
elif not os.path.isabs(full_path):
yaml_dir = os.path.dirname(os.path.abspath(yaml_path))
full_path = os.path.abspath(os.path.join(yaml_dir, y))
logging.info("Adding extra search path {} {}".format(x, full_path))
folder_paths.add_model_folder_path(x, full_path, is_default)
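An end-to-end sketch of the behavior the new tests pin down: a relative base_path is now resolved against the directory containing the YAML file rather than the process working directory. Folder names here are hypothetical, and it assumes running from a ComfyUI checkout so utils.extra_config and folder_paths import:

import os
import tempfile

import folder_paths
from utils.extra_config import load_extra_path_config

yaml_text = """
my_models:
    base_path: ./my_rel_base
    is_default: true
    checkpoints: checkpoints
"""

with tempfile.TemporaryDirectory() as tmp:
    cfg_path = os.path.join(tmp, "extra_model_paths.yaml")
    with open(cfg_path, "w") as f:
        f.write(yaml_text)
    load_extra_path_config(cfg_path)
    # The registered list now contains <tmp>/my_rel_base/checkpoints.
    print(folder_paths.folder_names_and_paths["checkpoints"][0])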