From f20b34a3beb550761e11c2f0dee55ed755670a8c Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Fri, 5 May 2023 14:22:55 -0400
Subject: [PATCH] Add return type annotations for embeddings and logits

---
 examples/llama_cpp.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/llama_cpp.py b/examples/llama_cpp.py
index fce7fce1c..e6638ed17 100644
--- a/examples/llama_cpp.py
+++ b/examples/llama_cpp.py
@@ -381,7 +381,7 @@ _lib.llama_n_embd.restype = c_int
 # Can be mutated in order to change the probabilities of the next token
 # Rows: n_tokens
 # Cols: n_vocab
-def llama_get_logits(ctx: llama_context_p):
+def llama_get_logits(ctx: llama_context_p): # type: (...) -> Array[float] # type: ignore
     return _lib.llama_get_logits(ctx)


@@ -391,7 +391,7 @@ _lib.llama_get_logits.restype = POINTER(c_float)
 # Get the embeddings for the input
 # shape: [n_embd] (1-dimensional)
-def llama_get_embeddings(ctx: llama_context_p):
+def llama_get_embeddings(ctx: llama_context_p): # type: (...) -> Array[float] # type: ignore
     return _lib.llama_get_embeddings(ctx)
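
For context, a minimal sketch of how the annotated return values might be consumed from Python; it is not part of the patch. It assumes `ctx` is an already-initialized and evaluated llama_context_p and that the bindings are importable as `llama_cpp`; the helper names below are hypothetical.

    # Illustrative sketch only: llama_get_logits / llama_get_embeddings return
    # ctypes POINTER(c_float) buffers, which index like an Array[float].
    from llama_cpp import (
        llama_context_p,
        llama_get_logits,
        llama_get_embeddings,
        llama_n_vocab,
        llama_n_embd,
    )

    def first_logits_row(ctx: llama_context_p) -> list:
        # Logits are laid out as rows of n_vocab floats; copy the first row.
        logits = llama_get_logits(ctx)
        return [logits[i] for i in range(llama_n_vocab(ctx))]

    def embedding_vector(ctx: llama_context_p) -> list:
        # Embeddings are a 1-dimensional buffer of length n_embd.
        emb = llama_get_embeddings(ctx)
        return [emb[i] for i in range(llama_n_embd(ctx))]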