server : allow to specify custom prompt for penalty calculation (#3727)

Alexey Parfenov 2023-12-23 09:31:49 +00:00 committed by GitHub
parent b9ec82d262
commit 6123979952
4 changed files with 54 additions and 3 deletions


@@ -36,6 +36,9 @@ typedef struct llama_sampling_params {
     float cfg_scale = 1.f; // how strong is guidance
 
     std::unordered_map<llama_token, float> logit_bias; // logit bias for specific tokens
+
+    std::vector<llama_token> penalty_prompt_tokens;    // tokens to penalize instead of the prompt/generation history
+    bool use_penalty_prompt_tokens = false;            // whether penalty_prompt_tokens should be used
 } llama_sampling_params;
 
 // general sampler context
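
For orientation, here is a minimal sketch (not the code added by this commit) of how the two new fields could be consumed by sampling code: when use_penalty_prompt_tokens is set, the repetition penalties are computed over the caller-supplied penalty_prompt_tokens rather than the tokens generated so far. The helper name penalty_tokens and the simplified typedefs are assumptions made for illustration only.

    // Sketch only: simplified stand-ins for the real llama.cpp types.
    #include <vector>

    typedef int llama_token; // placeholder for the real llama_token typedef

    struct sampling_params_sketch {
        std::vector<llama_token> penalty_prompt_tokens;
        bool use_penalty_prompt_tokens = false;
    };

    // Hypothetical helper: choose which token history the repetition-penalty
    // samplers should look at.
    static const std::vector<llama_token> & penalty_tokens(
            const sampling_params_sketch & params,
            const std::vector<llama_token> & generated_tokens) {
        return params.use_penalty_prompt_tokens
            ? params.penalty_prompt_tokens  // caller-supplied "penalty prompt"
            : generated_tokens;             // default: penalize recent output
    }

Keeping both the token list and a separate boolean flag lets the caller distinguish "no custom penalty prompt supplied" from "an empty one", so the default behaviour (penalizing against the generated tokens) stays in effect unless the feature is explicitly enabled.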