llama : llama_chat_apply_template support null buf
commit f84809b7ad
parent 7635b13ad7
1 changed file with 3 additions and 1 deletion
@@ -13544,7 +13544,9 @@ LLAMA_API int32_t llama_chat_apply_template(
     if (res < 0) {
         return res;
     }
-    strncpy(buf, formatted_chat.c_str(), length);
+    if (buf && length > 0) {
+        strncpy(buf, formatted_chat.c_str(), length);
+    }
     return res;
 }
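With this guard in place, calling llama_chat_apply_template with a null buffer (or zero length) becomes a safe way to query the required output size before allocating. Below is a minimal usage sketch, assuming the llama.h C API signature at the time of this commit (model, optional template override, message array, add_ass flag, output buffer, buffer length); the helper name and message contents are illustrative only.

// Minimal sketch: query the formatted length with a NULL buffer, then apply for real.
// Assumes `model` is a loaded llama_model * and the llama.h API as of this commit.
#include <string>
#include <vector>
#include "llama.h"

std::string format_chat(const llama_model * model) {
    llama_chat_message chat[] = {
        { "system", "You are a helpful assistant." },
        { "user",   "Hello!"                       },
    };
    const size_t n_msg = sizeof(chat) / sizeof(chat[0]);

    // First call: buf == NULL, length == 0 -> only the required length is returned.
    int32_t res = llama_chat_apply_template(model, nullptr, chat, n_msg, true, nullptr, 0);
    if (res < 0) {
        return ""; // template missing or unsupported
    }

    // Second call: copy the formatted chat into a buffer of the reported size.
    std::vector<char> buf(res + 1);
    res = llama_chat_apply_template(model, nullptr, chat, n_msg, true, buf.data(), (int32_t) buf.size());
    return std::string(buf.data(), res);
}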