Missing tokenizer.model error during gguf conversion

This commit is contained in:
Abhishek Gopinath Kovath 2024-04-03 03:31:17 +05:30
parent f87f7b8986
commit 57ce61a307

View file

@ -324,7 +324,7 @@ class Model(ABC):
        if not tokenizer_path.is_file():
            print(f'Error: Missing {tokenizer_path}', file=sys.stderr)
-           sys.exit(1)
+           raise FileNotFoundError(f"File not found: {tokenizer_path}")
        tokenizer = SentencePieceProcessor(str(tokenizer_path))
        vocab_size = self.hparams.get('vocab_size', tokenizer.vocab_size())