From a439fe15295657bf6cdc4d06a7d6cce92c8c6902 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Fri, 12 May 2023 14:28:22 -0400
Subject: [PATCH] Allow model to tokenize strings longer than context length
 and set add_bos. Closes #92

---
 examples/llama_cpp.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/llama_cpp.py b/examples/llama_cpp.py
index a56243dc9..f2366effe 100644
--- a/examples/llama_cpp.py
+++ b/examples/llama_cpp.py
@@ -337,7 +337,7 @@ def llama_tokenize(
     tokens,  # type: Array[llama_token]
     n_max_tokens: c_int,
     add_bos: c_bool,
-) -> c_int:
+) -> int:
     return _lib.llama_tokenize(ctx, text, tokens, n_max_tokens, add_bos)
 
 
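With the annotation corrected to a plain Python int, a caller can compare the result directly against the buffer size it passed in. The sketch below (not part of the patch) shows how a string longer than the initial buffer might be tokenized by retrying with a larger buffer; it assumes a context `ctx` already created through the bindings in examples/llama_cpp.py, and it relies on llama.cpp's convention of returning a negative token count when n_max_tokens is too small. The helper name `tokenize` is illustrative only.

from ctypes import c_bool, c_int

import llama_cpp  # the ctypes bindings defined in examples/llama_cpp.py


def tokenize(ctx, text: bytes, add_bos: bool = True):
    """Tokenize `text`, growing the buffer when the first guess is too small."""
    # Start with a buffer sized to the model's context window.
    n_max_tokens = int(llama_cpp.llama_n_ctx(ctx))
    tokens = (llama_cpp.llama_token * n_max_tokens)()
    n = llama_cpp.llama_tokenize(ctx, text, tokens, c_int(n_max_tokens), c_bool(add_bos))
    if n < 0:
        # llama.cpp reports a too-small buffer as a negative count whose
        # absolute value is the number of tokens actually required.
        n_max_tokens = -n
        tokens = (llama_cpp.llama_token * n_max_tokens)()
        n = llama_cpp.llama_tokenize(ctx, text, tokens, c_int(n_max_tokens), c_bool(add_bos))
    return list(tokens[:n])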