Update llama.cpp
parent a279acd680
commit a71cda6546
1 changed file with 7 additions and 8 deletions
@@ -3,7 +3,6 @@ import ctypes
 from ctypes import (
     c_int,
     c_float,
-    c_double,
     c_char_p,
     c_void_p,
     c_bool,
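
The hunks that follow replace each c_double in the callback and sampling signatures with c_float, which suggests the corresponding llama.cpp functions now take 32-bit floats. As a quick, purely illustrative reminder of why the distinction matters when declaring ctypes signatures (not part of this commit): c_float round-trips values through 32-bit precision, while c_double preserves the full Python float.

from ctypes import c_double, c_float

print(c_double(0.1).value)  # 0.1, a 64-bit double, identical to the Python float
print(c_float(0.1).value)   # ~0.10000000149011612, rounded to 32-bit precision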
@@ -40,7 +39,7 @@ class llama_token_data(Structure):
 
 llama_token_data_p = POINTER(llama_token_data)
 
-llama_progress_callback = ctypes.CFUNCTYPE(None, c_double, c_void_p)
+llama_progress_callback = ctypes.CFUNCTYPE(None, c_float, c_void_p)
 
 
 class llama_context_params(Structure):
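
Since llama_progress_callback now receives a C float, a Python callback can be registered against the corrected CFUNCTYPE. This is a minimal sketch, not part of the commit; the params.progress_callback / params.progress_callback_user_data wiring is an assumption based on the upstream llama.cpp header and is left commented out.

import ctypes
from ctypes import c_float, c_void_p

# Mirrors the updated typedef: void (*llama_progress_callback)(float progress, void *ctx)
llama_progress_callback = ctypes.CFUNCTYPE(None, c_float, c_void_p)

@llama_progress_callback
def report_progress(progress, user_data):
    # ctypes converts the incoming C float to a Python float
    print(f"load progress: {progress:.2%}")

report_progress(0.5, None)  # callable from Python as well; prints "load progress: 50.00%"

# Hypothetical wiring, field names assumed from upstream llama.cpp (not shown in this diff):
# params.progress_callback = report_progress
# params.progress_callback_user_data = None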
@@ -228,9 +227,9 @@ def llama_sample_top_p_top_k(
     last_n_tokens_data,  # type: Array[llama_token]
     last_n_tokens_size: c_int,
     top_k: c_int,
-    top_p: c_double,
-    temp: c_double,
-    repeat_penalty: c_double,
+    top_p: c_float,
+    temp: c_float,
+    repeat_penalty: c_float,
 ) -> llama_token:
     return _lib.llama_sample_top_p_top_k(
         ctx, last_n_tokens_data, last_n_tokens_size, top_k, top_p, temp, repeat_penalty
@@ -242,9 +241,9 @@ _lib.llama_sample_top_p_top_k.argtypes = [
     llama_token_p,
     c_int,
     c_int,
-    c_double,
-    c_double,
-    c_double,
+    c_float,
+    c_float,
+    c_float,
 ]
 _lib.llama_sample_top_p_top_k.restype = llama_token
 
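
With the argtypes above declared as c_float, ctypes converts plain Python numbers to 32-bit floats at the call boundary. The sketch below is a rough usage example under stated assumptions: llama_token is taken to be the c_int alias implied by llama_token_p, ctx stands for an already-initialized context, and the actual call is therefore left commented out.

from ctypes import c_int

llama_token = c_int  # assumed alias, consistent with llama_token_p above

n_last = 64
last_n_tokens_data = (llama_token * n_last)()  # zero-initialized token history

# Ordinary Python ints and floats are marshalled per the declared argtypes:
# next_token = llama_sample_top_p_top_k(
#     ctx,                  # initialized llama_context pointer (assumed)
#     last_n_tokens_data,
#     c_int(n_last),
#     top_k=40,
#     top_p=0.95,
#     temp=0.80,
#     repeat_penalty=1.10,
# )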