From 549291fe61f51f813f9b44a6dcb1f03fa7074858 Mon Sep 17 00:00:00 2001 From: Henri Vasserman Date: Sun, 28 May 2023 12:08:37 +0300 Subject: [PATCH] keep processed tokens from the beginning This means there is no limit on the input prompt; it will just get reset again as normal. --- examples/server/server.cpp | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1ee6ce1d1..c95226e66 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -100,7 +100,7 @@ struct llama_server_context // Reset context const int n_left = n_past - params.n_keep; n_past = std::max(1, params.n_keep); - processed_tokens.erase(processed_tokens.begin() + n_past, processed_tokens.end()); + //processed_tokens.erase(processed_tokens.begin() + n_past, processed_tokens.end()); embd.insert(embd.begin(), last_n_tokens.begin() + params.n_ctx - n_left / 2 - embd.size(), last_n_tokens.end() - embd.size()); } for (int i = 0; i < (int)embd.size(); i += params.n_batch) @@ -499,10 +499,6 @@ bool parse_options_completion(json body, llama_server_context& llama, Response & { llama.params.n_keep = body["n_keep"].get(); } - if (!body["as_loop"].is_null()) - { - llama.as_loop = body["as_loop"].get(); - } if (!body["prompt"].is_null()) { llama.params.prompt = body["prompt"].get();