Support for multiple reverse prompts. (#299)

Co-authored-by: Johnman <>
Co-authored-by: Johnman <tjohnman@github>
tjohnman 2023-03-19 20:33:06 +01:00 committed by GitHub
parent 7392f1cd2c
commit 24568371ae
3 changed files with 27 additions and 16 deletions

@@ -30,15 +30,15 @@ struct gpt_params {
     std::string model = "models/lamma-7B/ggml-model.bin"; // model path
     std::string prompt = "";
-    std::string antiprompt = ""; // string upon seeing which more user input is prompted
     bool random_prompt = false;
     bool use_color = false; // use color to distinguish generations and inputs
     bool interactive = false; // interactive mode
     bool interactive_start = false; // reverse prompt immediately
+    std::vector<std::string> antiprompt; // string upon seeing which more user input is prompted
     bool instruct = false; // instruction mode (used for Alpaca models)
     bool ignore_eos = false; // do not stop generating after eos
 };
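
Context for the change above (not part of the commit): turning antiprompt into a std::vector<std::string> lets the interactive loop compare the generated text against every registered reverse prompt instead of a single string. The sketch below is illustrative only; the helper ends_with_antiprompt and the sample strings are hypothetical, assuming reverse prompts are matched as suffixes of the accumulated output.

#include <iostream>
#include <string>
#include <vector>

// Hypothetical helper: true if `output` ends with any of the reverse prompts.
static bool ends_with_antiprompt(const std::string & output,
                                 const std::vector<std::string> & antiprompt) {
    for (const std::string & ap : antiprompt) {
        if (output.size() >= ap.size() &&
            output.compare(output.size() - ap.size(), ap.size(), ap) == 0) {
            return true;
        }
    }
    return false;
}

int main() {
    // Several reverse prompts registered at once (sample values).
    std::vector<std::string> antiprompt = { "User:", "### Instruction:" };
    std::string output = "Assistant: Sure, here is the answer.\nUser:";

    if (ends_with_antiprompt(output, antiprompt)) {
        std::cout << "reverse prompt detected, returning control to the user\n";
    }
    return 0;
}

In the rest of the commit (not reproduced in this hunk), each repeated -r / --reverse-prompt command-line flag presumably appends its value to this vector so that any of the supplied strings can pause generation.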