Tokenize the reverse prompt when needed
makes this PR compatible with https://github.com/ggerganov/llama.cpp/pull/330
This commit is contained in:
parent
1f4abb8dae
commit
3c211c64bd
1 changed file with 6 additions and 1 deletions
7
main.cpp
7
main.cpp
|
@ -1004,7 +1004,12 @@ int main(int argc, char ** argv) {
|
|||
// replace end of text token with newline token when in interactive mode
|
||||
if (id == EOS_TOKEN_ID && params.interactive) {
|
||||
id = NEWLINE_TOKEN_ID;
|
||||
if (!antipromptv_inp.empty()) {
|
||||
if (params.antiprompt.size() != 0) {
|
||||
// tokenize the reverse prompt to inject
|
||||
std::vector<std::vector<llama_vocab::id>> antipromptv_inp;
|
||||
for (auto antiprompt : params.antiprompt){
|
||||
antipromptv_inp.push_back(::llama_tokenize(vocab, antiprompt, false));
|
||||
}
|
||||
// inject the reverse prompt to return control to the user
|
||||
auto& ap_inp = antipromptv_inp.front();
|
||||
embd_inp.insert(embd_inp.end(), ap_inp.begin(), ap_inp.end());
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue