Update server.cpp

This commit is contained in:
Maximilian Winter 2024-01-27 06:30:07 +01:00
parent aa14068a2b
commit c8bc9297c0

View file

@@ -1555,10 +1555,6 @@ struct llama_server_context
// we have to evaluate at least 1 token to generate logits.
LOG_TEE("slot %d : we have to evaluate at least 1 token to generate logits\n", slot.id);
slot.n_past--;
if(slot.n_past_self_extension > 0)
{
slot.n_past_self_extension--;
}
}
LOG_VERBOSE("prompt ingested", {
@@ -1613,12 +1609,6 @@ struct llama_server_context
return true;
}
std::vector<int32_t> slot_npasts;
for (auto & slot : slots)
{
slot_npasts.emplace_back(0);
}
for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += n_batch)
{
const int32_t n_tokens = std::min(n_batch, (int32_t) (batch.n_tokens - i));