From 7d9117ab5aab1111abfe3d50acd95f9187d6e20e Mon Sep 17 00:00:00 2001
From: ngxson
Date: Thu, 27 Jun 2024 19:18:20 +0200
Subject: [PATCH] add DeepSeek Lite template

---
 src/llama.cpp                | 21 +++++++++++++++++++--
 tests/test-chat-template.cpp |  8 ++++++--
 2 files changed, 25 insertions(+), 4 deletions(-)

diff --git a/src/llama.cpp b/src/llama.cpp
index 50edd7f8c..80b81fda0 100644
--- a/src/llama.cpp
+++ b/src/llama.cpp
@@ -19601,18 +19601,35 @@ static int32_t llama_chat_apply_template_internal(
         if (add_ass) {
             ss << "<|start_header_id|>assistant<|end_header_id|>\n\n";
         }
-    } else if (tmpl == "minicpm" || tmpl_contains("<\xe7\x94\xa8\xe6\x88\xb7>")) {
+    } else if (tmpl == "minicpm" || tmpl_contains(u8"<用户>")) {
         // MiniCPM-3B-OpenHermes-2.5-v2-GGUF
+        std::string user_tag = u8"<用户>";
         for (auto message : chat) {
             std::string role(message->role);
             if (role == "user") {
-                ss << "<\xe7\x94\xa8\xe6\x88\xb7>";
+                ss << user_tag;
                 ss << trim(message->content);
                 ss << "<AI>";
             } else {
                 ss << trim(message->content);
             }
         }
+    } else if (tmpl == "deepseek-lite" || tmpl_contains("'Assistant: ' + message['content'] + eos_token")) {
+        // DeepSeek-Coder-V2-Lite-Instruct-GGUF
+        std::string eos_token = u8"<|end▁of▁sentence|>";
+        for (auto message : chat) {
+            std::string role(message->role);
+            if (role == "system") {
+                ss << message->content << "\n\n";
+            } else if (role == "user") {
+                ss << "User: " << message->content << "\n\n";
+            } else if (role == "assistant") {
+                ss << "Assistant: " << message->content << eos_token;
+            }
+        }
+        if (add_ass) {
+            ss << "Assistant:";
+        }
     } else {
         // template not supported
         return -1;
diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp
index e720d79b5..f14f77981 100644
--- a/tests/test-chat-template.cpp
+++ b/tests/test-chat-template.cpp
@@ -59,7 +59,9 @@ int main(void) {
         //Phi-3-vision
         "{% for message in messages %}{{'<|' + message['role'] + '|>' + '\n' + message['content'] + '<|end|>\n' }}{% endfor %}{% if add_generation_prompt and messages[-1]['role'] != 'assistant' %}{{- '<|assistant|>\n' -}}{% endif %}",
         // MiniCPM-3B-OpenHermes-2.5-v2-GGUF
-        "{% for message in messages %}{% if message['role'] == 'user' %}{{'<\xe7\x94\xa8\xe6\x88\xb7>' + message['content'].strip() + '<AI>'}}{% else %}{{message['content'].strip()}}{% endif %}{% endfor %}",
+        u8"{% for message in messages %}{% if message['role'] == 'user' %}{{'<用户>' + message['content'].strip() + '<AI>'}}{% else %}{{message['content'].strip()}}{% endif %}{% endfor %}",
+        // DeepSeek-Coder-V2-Lite-Instruct-GGUF
+        "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{{ bos_token }}{% for message in messages %}{% if message['role'] == 'user' %}{{ 'User: ' + message['content'] + '\n\n' }}{% elif message['role'] == 'assistant' %}{{ 'Assistant: ' + message['content'] + eos_token }}{% elif message['role'] == 'system' %}{{ message['content'] + '\n\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'Assistant:' }}{% endif %}",
     };
     std::vector<std::string> expected_output = {
         // teknium/OpenHermes-2.5-Mistral-7B
@@ -97,7 +99,9 @@ int main(void) {
         //Phi-3-vision
         "<|system|>\nYou are a helpful assistant<|end|>\n<|user|>\nHello<|end|>\n<|assistant|>\nHi there<|end|>\n<|user|>\nWho are you<|end|>\n<|assistant|>\n   I am an assistant   <|end|>\n<|user|>\nAnother question<|end|>\n<|assistant|>\n",
         // MiniCPM-3B-OpenHermes-2.5-v2-GGUF
-        "You are a helpful assistant<\xe7\x94\xa8\xe6\x88\xb7>Hello<AI>Hi there<\xe7\x94\xa8\xe6\x88\xb7>Who are you<AI>I am an assistant<\xe7\x94\xa8\xe6\x88\xb7>Another question",
+        u8"You are a helpful assistant<用户>Hello<AI>Hi there<用户>Who are you<AI>I am an assistant<用户>Another question",
+        // DeepSeek-Coder-V2-Lite-Instruct-GGUF
+        u8"You are a helpful assistant\n\nUser: Hello\n\nAssistant: Hi there<|end▁of▁sentence|>User: Who are you\n\nAssistant:    I am an assistant   <|end▁of▁sentence|>User: Another question\n\nAssistant:",
     };
     std::vector<char> formatted_chat(1024);
     int32_t res;