diff --git a/src/llama.cpp b/src/llama.cpp
index 4a9903cc3..058184d95 100644
--- a/src/llama.cpp
+++ b/src/llama.cpp
@@ -21600,6 +21600,14 @@ static int32_t llama_chat_apply_template_internal(
if (add_ass) {
ss << "Assistant:";
}
+ } else if (tmpl == "lite-mistral" || tmpl_contains("'<s>system\n' + message['content'] + '</s>\n'")) {
+ // OuteAI/Lite-Mistral-150M-v2-Instruct
+ for (auto message : chat) {
+ ss << "<s>" << message->role << "\n" << message->content << "</s>\n";
+ }
+ if (add_ass) {
+ ss << "<s>assistant\n";
+ }
} else {
// template not supported
return -1;
diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp
index 6583dd0b2..4a9d6f350 100644
--- a/tests/test-chat-template.cpp
+++ b/tests/test-chat-template.cpp
@@ -66,6 +66,8 @@ int main(void) {
u8"{% for message in messages %}{% if message['role'] == 'user' %}{{'<用户>' + message['content'].strip() + '<AI>'}}{% else %}{{message['content'].strip()}}{% endif %}{% endfor %}",
// DeepSeek-V2
"{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{{ bos_token }}{% for message in messages %}{% if message['role'] == 'user' %}{{ 'User: ' + message['content'] + '\n\n' }}{% elif message['role'] == 'assistant' %}{{ 'Assistant: ' + message['content'] + eos_token }}{% elif message['role'] == 'system' %}{{ message['content'] + '\n\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'Assistant:' }}{% endif %}",
+ // OuteAI/Lite-Mistral-150M-v2-Instruct
+ "{% for message in messages %}{% if message['role'] == 'user' %}{{'<s>user\n' + message['content'] + '</s>\n'}}{% elif message['role'] == 'assistant' %}{{'<s>assistant\n' + message['content'] + '</s>\n'}}{% elif message['role'] == 'system' %}{{'<s>system\n' + message['content'] + '</s>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{'<s>assistant\n'}}{% endif %}",
};
std::vector<std::string> expected_output = {
// teknium/OpenHermes-2.5-Mistral-7B
@@ -110,6 +112,8 @@ int main(void) {
u8"You are a helpful assistant<用户>Hello<AI>Hi there<用户>Who are you<AI>I am an assistant<用户>Another question<AI>",
// DeepSeek-V2
u8"You are a helpful assistant\n\nUser: Hello\n\nAssistant: Hi there<|end▁of▁sentence|>User: Who are you\n\nAssistant: I am an assistant <|end▁of▁sentence|>User: Another question\n\nAssistant:",
+ // OuteAI/Lite-Mistral-150M-v2-Instruct
"<s>system\nYou are a helpful assistant</s>\n<s>user\nHello</s>\n<s>assistant\nHi there</s>\n<s>user\nWho are you</s>\n<s>assistant\n I am an assistant </s>\n<s>user\nAnother question</s>\n<s>assistant\n",
};
std::vector<char> formatted_chat(1024);
int32_t res;