server: fix llama_sample_top_k order
parent b5c5c8e2b9
commit bc88fece87
1 changed file with 1 addition and 1 deletion
@@ -378,10 +378,10 @@ struct llama_server_context {
         } else {
             // Temperature sampling
             size_t min_keep = std::max(1, n_probs);
+            llama_sample_top_k(ctx, &candidates_p, top_k, min_keep);
             llama_sample_tail_free(ctx, &candidates_p, tfs_z, min_keep);
             llama_sample_typical(ctx, &candidates_p, typical_p, min_keep);
             llama_sample_top_p(ctx, &candidates_p, top_p, min_keep);
-            llama_sample_top_k(ctx, &candidates_p, top_k, min_keep);
             llama_sample_temperature(ctx, &candidates_p, temp);
             result.tok = llama_sample_token(ctx, &candidates_p);
         }
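For context, the change moves llama_sample_top_k so it runs before the other truncation samplers (tail-free, typical, top-p), with temperature applied last before the token is drawn. A minimal sketch of the corrected chain as a standalone helper, assuming the caller has already filled candidates_p from the logits and that the parameter names (top_k, tfs_z, typical_p, top_p, temp, n_probs) mean the same as in the diff; the helper name itself is hypothetical and not part of the commit:

#include <algorithm>

#include "llama.h"

// Hypothetical helper (not in the commit): applies the samplers in the
// corrected order and returns the sampled token.
static llama_token sample_temperature_chain(
        llama_context * ctx, llama_token_data_array & candidates_p,
        int top_k, float tfs_z, float typical_p, float top_p,
        float temp, int n_probs) {
    size_t min_keep = std::max(1, n_probs);

    llama_sample_top_k    (ctx, &candidates_p, top_k,     min_keep); // top-k now runs first
    llama_sample_tail_free(ctx, &candidates_p, tfs_z,     min_keep);
    llama_sample_typical  (ctx, &candidates_p, typical_p, min_keep);
    llama_sample_top_p    (ctx, &candidates_p, top_p,     min_keep);
    llama_sample_temperature(ctx, &candidates_p, temp);

    return llama_sample_token(ctx, &candidates_p);
}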