Cache node_info results in memory, giving roughly a 30x speedup when loading the page.

This commit is contained in:
meimeilook 2025-03-15 14:08:57 +08:00
parent d40ac012bc
commit 4b2b24906a


@@ -21,6 +21,7 @@ from io import BytesIO
import aiohttp
from aiohttp import web
import logging
from functools import lru_cache
import mimetypes
from comfy.cli_args import args
@@ -552,6 +553,7 @@ class PromptServer():
async def get_prompt(request):
return web.json_response(self.get_queue_info())
# using getattr gives roughly a 2x speedup when loading node info
@lru_cache(maxsize=None)
def node_info(node_class):
obj_class = nodes.NODE_CLASS_MAPPINGS[node_class]
input_types = obj_class.INPUT_TYPES()
@@ -587,6 +589,11 @@ class PromptServer():
except Exception:
logging.error(f"[ERROR] An error occurred while retrieving information for the '{x}' node.")
logging.error(traceback.format_exc())
# Debug: inspect the node_info in-memory cache (hits, misses, current size)
#cache_stats = node_info.cache_info()
#print(f"node_info Cache Hits: {cache_stats.hits}, Misses: {cache_stats.misses}, Current Memory Cache Size: {cache_stats.currsize}")
response = web.json_response(out)
response.enable_compression()
return response
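
For reference, a minimal standalone sketch of the same memoization pattern used in this commit: a lookup function decorated with functools.lru_cache so repeated requests for the same node class name are served from the in-memory cache, with cache_info() reporting hits and misses the way the commented-out debug lines above do. FAKE_NODE_CLASS_MAPPINGS and the returned dict shape are hypothetical stand-ins, not ComfyUI's real data.

    from functools import lru_cache

    # Hypothetical stand-in for ComfyUI's NODE_CLASS_MAPPINGS lookup.
    FAKE_NODE_CLASS_MAPPINGS = {
        "LoadImage": {"input": {"image": ("IMAGE",)}},
        "SaveImage": {"input": {"images": ("IMAGE",)}},
    }

    @lru_cache(maxsize=None)  # unbounded cache: one entry per node class name
    def node_info(node_class: str) -> dict:
        # The first call per node_class runs this (relatively expensive) body;
        # later calls return the cached dict without recomputing it.
        spec = FAKE_NODE_CLASS_MAPPINGS[node_class]
        return {"name": node_class, "input": spec["input"]}

    for _ in range(3):
        node_info("LoadImage")   # 1 miss, then 2 hits
    node_info("SaveImage")       # 1 miss

    # Same inspection hook as the commented-out debug lines above:
    # prints hits=2, misses=2, currsize=2.
    print(node_info.cache_info())

One trade-off worth noting with this pattern: because the cache lives for the lifetime of the process, node classes registered after the first request would keep serving stale info until node_info.cache_clear() is called or the server restarts.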