server: fix typo
This commit is contained in:
parent
2ebedda3d9
commit
27976c31b6
1 changed file with 2 additions and 2 deletions
|
@@ -20,7 +20,7 @@ inline static json oaicompat_completion_params_parse(
|
|||
{
|
||||
json llama_params;
|
||||
bool using_chatml = chat_template == "chatml";
|
||||
std::string formated_prompt = using_chatml
|
||||
std::string formatted_prompt = using_chatml
|
||||
? format_chatml(body["messages"]) // OpenAI 'messages' to chatml
|
||||
: format_mistral(body["messages"]); // OpenAI 'messages' to mistral format
|
||||
|
||||
|
@@ -35,7 +35,7 @@ inline static json oaicompat_completion_params_parse(
|
|||
// https://platform.openai.com/docs/api-reference/chat/create
|
||||
llama_sampling_params default_sparams;
|
||||
llama_params["model"] = json_value(body, "model", std::string("unknown"));
|
||||
llama_params["prompt"] = formated_prompt;
|
||||
llama_params["prompt"] = formatted_prompt;
|
||||
llama_params["cache_prompt"] = json_value(body, "cache_prompt", false);
|
||||
llama_params["temperature"] = json_value(body, "temperature", 0.0);
|
||||
llama_params["top_k"] = json_value(body, "top_k", default_sparams.top_k);
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue