Allow model to tokenize strings longer than context length and set add_bos. Closes #92
parent b5531e1435
commit a439fe1529
1 changed file with 1 addition and 1 deletion
@@ -337,7 +337,7 @@ def llama_tokenize(
     tokens,  # type: Array[llama_token]
     n_max_tokens: c_int,
     add_bos: c_bool,
-) -> c_int:
+) -> int:
     return _lib.llama_tokenize(ctx, text, tokens, n_max_tokens, add_bos)
 
 
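For context, a hedged usage sketch of the wrapped call: only the llama_tokenize signature (ctx, text, tokens, n_max_tokens, add_bos) comes from the hunk above; the model path and the setup/teardown calls (llama_context_default_params, llama_init_from_file, llama_free) are assumptions about the surrounding low-level API, not part of this commit.

# Sketch only: the model path and setup calls below are assumptions.
import llama_cpp

params = llama_cpp.llama_context_default_params()
ctx = llama_cpp.llama_init_from_file(b"./models/7B/ggml-model.bin", params)

text = b"Hello, world!"
n_max_tokens = 64
tokens = (llama_cpp.llama_token * n_max_tokens)()  # ctypes output buffer

# add_bos=True asks the tokenizer to prepend the beginning-of-sequence token;
# the return value (now annotated as a plain int) is the number of tokens written.
n_tokens = llama_cpp.llama_tokenize(ctx, text, tokens, n_max_tokens, True)
print([tokens[i] for i in range(n_tokens)])

llama_cpp.llama_free(ctx)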