slight clarity fix
parent 96fb12cfa2
commit bee6a401fd
2 changed files with 2 additions and 2 deletions
@@ -353,7 +353,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs, generation_o
                 if (concat_output.find(matched) != std::string::npos)
                 {
                     remaining_tokens = 0;
-                    printf("\n(Stop sequence triggered: %s)",matched.c_str());
+                    printf("\n(Stop sequence triggered: <%s>)",matched.c_str());
                     break;
                 }
             }
@@ -246,7 +246,7 @@ generation_outputs llama_generate(const generation_inputs inputs, generation_out
                 if (concat_output.find(matched) != std::string::npos)
                 {
                     remaining_tokens = 0;
-                    printf("\n(Stop sequence triggered: %s)",matched.c_str());
+                    printf("\n(Stop sequence triggered: <%s>)",matched.c_str());
                     break;
                 }
             }
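For context, the change only touches the log message: wrapping the matched stop sequence in angle brackets makes any leading or trailing whitespace in the trigger visible in the console output. Below is a minimal standalone sketch of the same check; the variable names mirror the diff, but the surrounding loop and sample data are simplified placeholders, not the project's actual code.

#include <cstdio>
#include <string>
#include <vector>

int main()
{
    // Hypothetical sample data standing in for the generation state in the diff.
    std::string concat_output = "Hello there.\nUser: ";
    std::vector<std::string> stop_sequences = {"\nUser: ", "###"};
    int remaining_tokens = 5;

    for (const std::string &matched : stop_sequences)
    {
        // Same check as in the diff: halt generation once any trigger appears in the output.
        if (concat_output.find(matched) != std::string::npos)
        {
            remaining_tokens = 0;
            // Angle brackets delimit the trigger, so whitespace at its edges shows up in the log.
            printf("\n(Stop sequence triggered: <%s>)", matched.c_str());
            break;
        }
    }
    return 0;
}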