llama : add llama_vocab, functions -> methods, naming (#11110)
* llama : functions -> methods (#11110)
* llama : add struct llama_vocab to the API (#11156)
  ggml-ci
* hparams : move vocab params to llama_vocab (#11159)
  ggml-ci
* vocab : more pimpl (#11165)
  ggml-ci
* vocab : minor tokenization optimizations (#11160)
  ggml-ci
  Co-authored-by: Diego Devesa <slarengh@gmail.com>
* lora : update API names (#11167)
  ggml-ci
* llama : update API names to use correct prefix (#11174)
* llama : update API names to use correct prefix
  ggml-ci
* cont
  ggml-ci
* cont
  ggml-ci
* minor [no ci]
* vocab : llama_vocab_add_[be]os -> llama_vocab_get_add_[be]os (#11174)
  ggml-ci
* vocab : llama_vocab_n_vocab -> llama_vocab_n_tokens (#11174)
  ggml-ci

---------

Co-authored-by: Diego Devesa <slarengh@gmail.com>
parent c05e8c9934
commit afa8a9ec9b

68 changed files with 5855 additions and 5400 deletions
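The first two hunks below migrate the Swift example from llama_load_model_from_file / llama_free_model to the new llama_model_* names. As a rough, non-authoritative sketch of the resulting lifecycle (the renamed calls are taken from the diff; llama_backend_init and llama_model_default_params are assumed unchanged, and the helper below is illustrative, not part of the commit), loading and releasing a model from Swift could look like this:

import llama

// Sketch only: mirrors the renamed calls shown in the diff below.
// llama_model is an opaque C type, imported into Swift as OpaquePointer.
func withModel(path: String, body: (OpaquePointer) throws -> Void) rethrows {
    llama_backend_init()
    defer { llama_backend_free() }

    let params = llama_model_default_params()

    // was: llama_load_model_from_file(path, params)
    guard let model = llama_model_load_from_file(path, params) else {
        print("Could not load model at \(path)")
        return
    }
    // was: llama_free_model(model)
    defer { llama_model_free(model) }

    try body(model)
}

The defers release the model before shutting down the backend, which matches the teardown order used by the example's deinit after this change.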
@@ -52,8 +52,8 @@ actor LlamaContext {
     deinit {
         llama_sampler_free(sampling)
         llama_batch_free(batch)
+        llama_model_free(model)
         llama_free(context)
-        llama_free_model(model)
         llama_backend_free()
     }

@@ -65,7 +65,7 @@ actor LlamaContext {
         model_params.n_gpu_layers = 0
         print("Running on simulator, force use n_gpu_layers = 0")
 #endif
-        let model = llama_load_model_from_file(path, model_params)
+        let model = llama_model_load_from_file(path, model_params)
         guard let model else {
             print("Could not load model at \(path)")
             throw LlamaError.couldNotInitializeContext
@@ -151,7 +151,7 @@ actor LlamaContext {

         new_token_id = llama_sampler_sample(sampling, context, batch.n_tokens - 1)

-        if llama_token_is_eog(model, new_token_id) || n_cur == n_len {
+        if llama_vocab_is_eog(model, new_token_id) || n_cur == n_len {
             print("\n")
             is_done = true
             let new_token_str = String(cString: temporary_invalid_cchars + [0])
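Beyond the is_eog rename in the last hunk, the commit message lists further vocab getters (llama_vocab_get_add_[be]os, llama_vocab_n_tokens). A minimal sketch of how they could be queried from Swift, assuming the vocab handle is obtained through llama_model_get_vocab and that the C header's llama_vocab_is_eog takes the vocab rather than the model; treat these signatures as assumptions rather than as part of this diff:

import llama

// Sketch only: exercises the vocab-prefixed getters named in the commit message.
func describeVocab(model: OpaquePointer, lastToken: llama_token) {
    // assumed accessor: the model owns its llama_vocab
    let vocab = llama_model_get_vocab(model)

    // was: llama_vocab_n_vocab
    let nTokens = llama_vocab_n_tokens(vocab)

    // was: llama_vocab_add_bos / llama_vocab_add_eos
    let addBOS = llama_vocab_get_add_bos(vocab)
    let addEOS = llama_vocab_get_add_eos(vocab)

    print("n_tokens: \(nTokens), add_bos: \(addBOS), add_eos: \(addEOS)")

    // was: llama_token_is_eog(model, token)
    if llama_vocab_is_eog(vocab, lastToken) {
        print("last token is end-of-generation")
    }
}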