Mirror of https://github.com/comfyanonymous/ComfyUI.git
Synced 2025-04-13 15:03:33 +00:00
Merge branch 'comfyanonymous:master' into chroma-support
Commit d781e17045
execution.py: 30 lines changed
@@ -59,14 +59,26 @@ class IsChangedCache:
         self.is_changed[node_id] = node["is_changed"]
         return self.is_changed[node_id]
+
+
+class CacheType(Enum):
+    CLASSIC = 0
+    LRU = 1
+    DEPENDENCY_AWARE = 2
+
+
 class CacheSet:
-    def __init__(self, lru_size=None, cache_none=False):
-        if cache_none:
+    def __init__(self, cache_type=None, cache_size=None):
+        if cache_type == CacheType.DEPENDENCY_AWARE:
             self.init_dependency_aware_cache()
-        elif lru_size is None or lru_size == 0:
-            self.init_classic_cache()
+            logging.info("Disabling intermediate node cache.")
+        elif cache_type == CacheType.LRU:
+            if cache_size is None:
+                cache_size = 0
+            self.init_lru_cache(cache_size)
+            logging.info("Using LRU cache")
         else:
-            self.init_lru_cache(lru_size)
+            self.init_classic_cache()
 
         self.all = [self.outputs, self.ui, self.objects]
 
     # Performs like the old cache -- dump data ASAP
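
This hunk folds the old lru_size/cache_none flag pair into a single CacheType enum. Below is a minimal runnable sketch of the same dispatch pattern; the stub init_* methods are hypothetical stand-ins for ComfyUI's real cache classes, which this diff does not show.

import logging
from enum import Enum

class CacheType(Enum):
    CLASSIC = 0
    LRU = 1
    DEPENDENCY_AWARE = 2

class CacheSetSketch:
    """Hypothetical stand-in for execution.CacheSet; only the dispatch logic is real."""
    def __init__(self, cache_type=None, cache_size=None):
        # One enum value replaces the old (lru_size, cache_none) pair.
        if cache_type == CacheType.DEPENDENCY_AWARE:
            self.init_dependency_aware_cache()
            logging.info("Disabling intermediate node cache.")
        elif cache_type == CacheType.LRU:
            if cache_size is None:
                cache_size = 0
            self.init_lru_cache(cache_size)
            logging.info("Using LRU cache")
        else:
            # Anything else (CLASSIC, None, or a bogus value) gets the classic cache.
            self.init_classic_cache()

    def init_classic_cache(self):
        self.kind = "classic"

    def init_lru_cache(self, size):
        self.kind = f"lru({size})"

    def init_dependency_aware_cache(self):
        self.kind = "dependency_aware"

print(CacheSetSketch(CacheType.LRU, cache_size=16).kind)  # lru(16)
print(CacheSetSketch().kind)                              # classic
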
@@ -420,14 +432,14 @@ def execute(server, dynprompt, caches, current_item, extra_data, executed, promp
     return (ExecutionResult.SUCCESS, None, None)
 
 class PromptExecutor:
-    def __init__(self, server, lru_size=None, cache_none=False):
-        self.lru_size = lru_size
-        self.cache_none = cache_none
+    def __init__(self, server, cache_type=False, cache_size=None):
+        self.cache_size = cache_size
+        self.cache_type = cache_type
         self.server = server
         self.reset()
 
     def reset(self):
-        self.caches = CacheSet(self.lru_size, self.cache_none)
+        self.caches = CacheSet(cache_type=self.cache_type, cache_size=self.cache_size)
         self.status_messages = []
         self.success = True
 
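
One detail worth noting in this hunk: the new __init__ defaults to cache_type=False rather than None or CacheType.CLASSIC. Because CacheType is a plain Enum, False compares unequal to every member, so a PromptExecutor constructed without an explicit cache_type still falls through to the classic cache inside CacheSet. A minimal check, reusing the enum definition from the hunk above:

from enum import Enum

class CacheType(Enum):
    CLASSIC = 0
    LRU = 1
    DEPENDENCY_AWARE = 2

# False matches no CacheType member, so CacheSet's if/elif chain
# lands in the else branch and initializes the classic cache.
print(False == CacheType.DEPENDENCY_AWARE)  # False
print(False == CacheType.LRU)               # False
print(False == CacheType.CLASSIC)           # False (Enum, not IntEnum)
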
main.py: 8 lines changed
@@ -156,7 +156,13 @@ def cuda_malloc_warning():
 
 def prompt_worker(q, server_instance):
     current_time: float = 0.0
-    e = execution.PromptExecutor(server_instance, lru_size=args.cache_lru, cache_none=args.cache_none)
+    cache_type = execution.CacheType.CLASSIC
+    if args.cache_lru > 0:
+        cache_type = execution.CacheType.LRU
+    elif args.cache_none:
+        cache_type = execution.CacheType.DEPENDENCY_AWARE
+
+    e = execution.PromptExecutor(server_instance, cache_type=cache_type, cache_size=args.cache_lru)
     last_gc_collect = 0
     need_gc = False
     gc_collect_interval = 10.0
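
The main.py hunk maps the existing CLI flags onto the new enum, and the if/elif order means a positive --cache-lru takes precedence over --cache-none when both are given. A self-contained sketch of that precedence; the parser below is a stand-in for ComfyUI's real CLI argument handling, not the real one:

import argparse
from enum import Enum

class CacheType(Enum):
    CLASSIC = 0
    LRU = 1
    DEPENDENCY_AWARE = 2

# Flag names follow the options referenced in the diff (args.cache_lru,
# args.cache_none); this toy parser exists only for the demonstration.
parser = argparse.ArgumentParser()
parser.add_argument("--cache-lru", type=int, default=0)
parser.add_argument("--cache-none", action="store_true")
args = parser.parse_args(["--cache-lru", "10", "--cache-none"])

# Same selection logic as the new prompt_worker.
cache_type = CacheType.CLASSIC
if args.cache_lru > 0:
    cache_type = CacheType.LRU
elif args.cache_none:
    cache_type = CacheType.DEPENDENCY_AWARE

print(cache_type)  # CacheType.LRU -- the LRU flag wins over --cache-none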