From 89eaf5036a07b394bea4c7b35f97d171bee2e87b Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 16 Dec 2024 21:03:24 +0200
Subject: [PATCH] server : add "tokens" output

ggml-ci
---
 examples/server/server.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 5ed4e8d27..57db582d7 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -687,8 +687,6 @@ struct server_task_result_cmpl_partial : server_task_result {
             json second_ret = json{
                 {"choices", json::array({json{{"finish_reason", nullptr},
                                             {"index", 0},
-                                            {"delta", json {
-                                                {"content", content}}}
                                             }})},
                 {"created", t},
                 {"id", oaicompat_cmpl_id},
@@ -704,6 +702,7 @@ struct server_task_result_cmpl_partial : server_task_result {
                 {"delta", json {
                     {"content", content},
+                    {"tokens", tokens}
                 }},
             }});
         }
@@ -1017,6 +1016,7 @@ struct server_slot {
         n_prompt_tokens = 0;
         last_nl_pos = 0;
         generated_text = "";
+        generated_tokens = {};
         has_new_line = false;
         truncated = false;
         stop = STOP_TYPE_NONE;