tokenize reverse prompt when needed

makes this PR compatible with https://github.com/ggerganov/llama.cpp/pull/330
rabidcopy 2023-03-21 12:53:32 -05:00 committed by GitHub
parent 1f4abb8dae
commit 3c211c64bd

@@ -1004,7 +1004,12 @@ int main(int argc, char ** argv) {
         // replace end of text token with newline token when in interactive mode
         if (id == EOS_TOKEN_ID && params.interactive) {
             id = NEWLINE_TOKEN_ID;
-            if (!antipromptv_inp.empty()) {
+            if (params.antiprompt.size() != 0) {
+                // tokenize the reverse prompt to inject
+                std::vector<std::vector<llama_vocab::id>> antipromptv_inp;
+                for (auto antiprompt : params.antiprompt){
+                    antipromptv_inp.push_back(::llama_tokenize(vocab, antiprompt, false));
+                }
                 // inject the reverse prompt to return control to the user
                 auto& ap_inp = antipromptv_inp.front();
                 embd_inp.insert(embd_inp.end(), ap_inp.begin(), ap_inp.end());
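
For readers outside the diff's context, the sketch below is a minimal, self-contained illustration of the pattern this change adopts: keep the reverse prompts as plain strings and tokenize them only at the moment an end-of-text token is replaced in interactive mode, rather than pre-tokenizing them at startup. The token_id alias, the toy tokenize() helper, and the hard-coded token IDs are stand-ins for illustration only, not the real llama.cpp types or vocabulary.

// Minimal sketch of on-demand reverse-prompt tokenization.
// tokenize() and the token IDs below are illustrative stand-ins.
#include <cstdio>
#include <string>
#include <vector>

using token_id = int;

const token_id EOS_TOKEN_ID     = 2;   // assumed end-of-text id
const token_id NEWLINE_TOKEN_ID = 13;  // assumed newline id

// stand-in tokenizer: one token per character, offset to avoid collisions
static std::vector<token_id> tokenize(const std::string & text) {
    std::vector<token_id> out;
    for (char c : text) {
        out.push_back(100 + (unsigned char) c);
    }
    return out;
}

int main() {
    const bool interactive = true;
    const std::vector<std::string> antiprompt = { "User:" };

    std::vector<token_id> embd_inp;   // pending input tokens
    token_id id = EOS_TOKEN_ID;       // pretend the model just emitted EOS

    // replace end of text token with newline token when in interactive mode
    if (id == EOS_TOKEN_ID && interactive) {
        id = NEWLINE_TOKEN_ID;
        if (!antiprompt.empty()) {
            // tokenize the reverse prompts only now that they are needed
            std::vector<std::vector<token_id>> antipromptv_inp;
            for (const auto & ap : antiprompt) {
                antipromptv_inp.push_back(tokenize(ap));
            }
            // inject the first reverse prompt to return control to the user
            const auto & ap_inp = antipromptv_inp.front();
            embd_inp.insert(embd_inp.end(), ap_inp.begin(), ap_inp.end());
        }
    }

    printf("next token: %d, injected %zu reverse-prompt tokens\n",
           id, embd_inp.size());
    return 0;
}

The design point is the same as in the diff: because tokenization happens inside the EOS branch, runs that never hit end-of-text (or that define no reverse prompt) never pay for it, and no pre-built antipromptv_inp needs to exist elsewhere in main().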