Fix memory estimation bug with kontext. (#8709)

Author: comfyanonymous, 2025-06-27 14:21:12 -07:00 (committed by GitHub)
Parent: 9093301a49
Commit: c36be0ea09

@@ -816,7 +816,7 @@ class PixArt(BaseModel):
 class Flux(BaseModel):
     def __init__(self, model_config, model_type=ModelType.FLUX, device=None, unet_model=comfy.ldm.flux.model.Flux):
         super().__init__(model_config, model_type, device=device, unet_model=unet_model)
-        self.memory_usage_factor_conds = ("kontext",)
+        self.memory_usage_factor_conds = ("ref_latents",)

     def concat_cond(self, **kwargs):
         try:
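
Note on the fix: memory_usage_factor_conds lists the conditioning keys whose tensors count toward the sampling memory estimate. Flux Kontext attaches its reference images to the conditioning under the "ref_latents" key, so the old value ("kontext",) matched nothing and the reference latents were left out of the estimate. Below is a minimal, simplified sketch of how such a key list could be consumed; the helper name extra_cond_memory, the conds dict layout, and the scale factor are illustrative assumptions, not ComfyUI's actual memory_required implementation.

# Simplified sketch (illustrative names, not ComfyUI's real code) of how a
# tuple of conditioning keys can drive a memory estimate.
import math

class FluxLikeModel:
    def __init__(self):
        # Conditioning keys whose tensors count toward the memory estimate.
        # The old value ("kontext",) matched no key, so Kontext reference
        # latents (stored under "ref_latents") were silently skipped.
        self.memory_usage_factor_conds = ("ref_latents",)
        self.memory_usage_factor = 2.8  # illustrative scale factor

    def extra_cond_memory(self, conds, dtype_size=2):
        # Sum the element counts of every tracked conditioning tensor and
        # scale by dtype size and the model's memory factor.
        total_elements = 0
        for key in self.memory_usage_factor_conds:
            for tensor in conds.get(key, []):
                total_elements += math.prod(tensor.shape)
        return total_elements * dtype_size * self.memory_usage_factor

# Usage (hypothetical): conds = {"ref_latents": [reference_latent_tensor]}
# extra_bytes = FluxLikeModel().extra_cond_memory(conds)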