server: clean up using_chatml variable
Co-authored-by: Jared Van Bortel <cebtenzzre@gmail.com>
This commit is contained in:
parent
ebe3079539
commit
1a27406426
1 changed file with 1 addition and 2 deletions
|
@@ -19,8 +19,7 @@ inline static json oaicompat_completion_params_parse(
     const std::string &chat_template)
 {
     json llama_params;
-    bool using_chatml = chat_template == "chatml";
-    std::string formatted_prompt = using_chatml
+    std::string formatted_prompt = chat_template == "chatml"
         ? format_chatml(body["messages"]) // OpenAI 'messages' to chatml (with <|im_start|>,...)
         : format_llama2(body["messages"]); // OpenAI 'messages' to llama2 (with [INST],...)
|
Loading…
Add table
Add a link
Reference in a new issue