change template for inference
parent bbf1aaa7ed
commit 460212ac2a
1 changed file with 1 addition and 1 deletion
@@ -274,7 +274,7 @@ int main(int argc, char ** argv) {
     if (params.omni_vlm_version == "vlm-81-ocr") {
         params.prompt = "<|im_start|>system\nYou are Nano-Omni-VLM, created by Nexa AI. You are a helpful assistant.<|im_end|>\n<|im_start|>user\n <|vision_start|><|image_pad|><|vision_end|><|im_end|>";
     } else if (params.omni_vlm_version == "vlm-81-instruct" || params.omni_vlm_version == "nano-vlm-instruct") {
-        params.prompt = "<|im_start|>system\nYou are Nano-Omni-VLM, created by Nexa AI. You are a helpful assistant.<|im_end|>\n<|im_start|>user\n" + params.prompt + "\n<|vision_start|><|image_pad|><|vision_end|><|im_end|>";
+        params.prompt = "<|im_start|>system\nYou are Nano-Omni-VLM, created by Nexa AI. You are a helpful assistant.<|im_end|>\n<|im_start|>user\n\n<|vision_start|><|image_pad|><|vision_end|>" + params.prompt + "<|im_end|>";
     } else {
         LOG_TEE("%s : error: you set wrong vlm version info:'%s'.\n", __func__, params.omni_vlm_version.c_str());
         print_usage(argc, argv, {});
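For reference, a minimal sketch (not part of this commit) of how the updated instruct template composes the final prompt: after this change the user text follows the vision tokens instead of preceding them. The special tokens and system message are copied from the diff above; the user prompt value is illustrative.

#include <iostream>
#include <string>

int main() {
    // Illustrative user prompt; in the example binary this comes from params.prompt.
    std::string user_prompt = "Describe this image.";

    // Updated "vlm-81-instruct" / "nano-vlm-instruct" template from this commit:
    // the user text now comes after <|vision_start|><|image_pad|><|vision_end|>.
    std::string prompt =
        "<|im_start|>system\nYou are Nano-Omni-VLM, created by Nexa AI. "
        "You are a helpful assistant.<|im_end|>\n<|im_start|>user\n\n"
        "<|vision_start|><|image_pad|><|vision_end|>" + user_prompt + "<|im_end|>";

    std::cout << prompt << std::endl;
    return 0;
}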