From 636d4bfb8994c7f123f15971af5d38a9754377ab Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Sat, 19 Apr 2025 15:55:43 -0400
Subject: [PATCH] Fix hard crash when the spiece tokenizer path is bad.

---
 comfy/text_encoders/spiece_tokenizer.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/comfy/text_encoders/spiece_tokenizer.py b/comfy/text_encoders/spiece_tokenizer.py
index 21df4f86..caccb3ca 100644
--- a/comfy/text_encoders/spiece_tokenizer.py
+++ b/comfy/text_encoders/spiece_tokenizer.py
@@ -1,4 +1,5 @@
 import torch
+import os
 
 class SPieceTokenizer:
     @staticmethod
@@ -15,6 +16,8 @@ class SPieceTokenizer:
         if isinstance(tokenizer_path, bytes):
             self.tokenizer = sentencepiece.SentencePieceProcessor(model_proto=tokenizer_path, add_bos=self.add_bos, add_eos=self.add_eos)
         else:
+            if not os.path.isfile(tokenizer_path):
+                raise ValueError("invalid tokenizer")
             self.tokenizer = sentencepiece.SentencePieceProcessor(model_file=tokenizer_path, add_bos=self.add_bos, add_eos=self.add_eos)
 
     def get_vocab(self):
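
A minimal sketch of the behavior this patch targets, assuming the ComfyUI repository is importable and that the constructor accepts a file path as shown in the diff context; the bogus path used here is purely illustrative:

```python
# Sketch only: assumes ComfyUI is on the import path; the path below is made up.
from comfy.text_encoders.spiece_tokenizer import SPieceTokenizer

try:
    # Before this patch, a nonexistent model file was passed straight to
    # sentencepiece.SentencePieceProcessor, which the commit message describes
    # as causing a hard crash. With the os.path.isfile() guard, a bad path
    # now surfaces as an ordinary Python exception instead.
    SPieceTokenizer("/path/does/not/exist.model")
except ValueError as e:
    print("tokenizer path rejected:", e)
```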