fix content of format_final_response
This commit is contained in:
parent
1bef2dcf87
commit
af1ea58b60
1 changed file with 3 additions and 3 deletions
|
@ -1031,7 +1031,7 @@ static json format_final_response(llama_server_context &llama, const std::string
|
||||||
{
|
{
|
||||||
|
|
||||||
json res = json{
|
json res = json{
|
||||||
{"content", ""},
|
{"content", content},
|
||||||
{"stop", true},
|
{"stop", true},
|
||||||
{"model", llama.params.model_alias},
|
{"model", llama.params.model_alias},
|
||||||
{"tokens_predicted", llama.num_tokens_predicted},
|
{"tokens_predicted", llama.num_tokens_predicted},
|
||||||
|
@ -1332,10 +1332,10 @@ int main(int argc, char **argv)
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!llama.has_next_token) {
|
if (!llama.has_next_token) {
|
||||||
// Generation is done, send extra information.
|
// Generation is done, send extra information.
|
||||||
const json data = format_final_response(llama, to_send, llama.generated_token_probs);
|
const json data = format_final_response(llama, "", llama.generated_token_probs);
|
||||||
|
|
||||||
const std::string str =
|
const std::string str =
|
||||||
"data: " +
|
"data: " +
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue