Fix for Top K disabling (#480)

* Update gpttype_adapter.cpp

* use n_vocab instead of 32000 for when top k is off
This commit is contained in:
kalomaze 2023-10-19 10:20:44 -05:00 committed by GitHub
parent 8c6001de2a
commit ddce116ec9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@@ -1312,7 +1312,7 @@ generation_outputs gpttype_generate(const generation_inputs inputs, generation_o
     }
     if (params.top_k < 1)
     {
-        params.top_k = 120; //to disable top_k we actually need to increase this value to a very high number
+        params.top_k = n_vocab; // all tokens in the vocabulary should be considered if top k is disabled
     }
     if (params.seed <= 0 || params.seed==0xFFFFFFFF)
     {