Raise FileNotFoundError for a missing tokenizer.model during GGUF conversion
This commit is contained in:
parent
f87f7b8986
commit
57ce61a307
1 changed file with 1 addition and 1 deletion
|
@ -324,7 +324,7 @@ class Model(ABC):
|
||||||
|
|
||||||
if not tokenizer_path.is_file():
|
if not tokenizer_path.is_file():
|
||||||
print(f'Error: Missing {tokenizer_path}', file=sys.stderr)
|
print(f'Error: Missing {tokenizer_path}', file=sys.stderr)
|
||||||
sys.exit(1)
|
raise FileNotFoundError(f"File not found: {tokenizer_path}")
|
||||||
|
|
||||||
tokenizer = SentencePieceProcessor(str(tokenizer_path))
|
tokenizer = SentencePieceProcessor(str(tokenizer_path))
|
||||||
vocab_size = self.hparams.get('vocab_size', tokenizer.vocab_size())
|
vocab_size = self.hparams.get('vocab_size', tokenizer.vocab_size())
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue