From 0e94ea65c6c311d3cf3e0d4fed0620c33708cae4 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 8 May 2023 17:41:26 +0300
Subject: [PATCH] perplexity : add clarifying comments

---
 examples/perplexity/perplexity.cpp | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp
index 5cfab3b18..9212dee5c 100644
--- a/examples/perplexity/perplexity.cpp
+++ b/examples/perplexity/perplexity.cpp
@@ -54,6 +54,7 @@ void perplexity(llama_context * ctx, const gpt_params & params) {
             // save original token and restore it after eval
             const auto token_org = tokens[batch_start];
 
+            // add BOS token for the first batch of each chunk
             if (j == 0) {
                 tokens[batch_start] = llama_token_bos();
             }
@@ -63,6 +64,7 @@ void perplexity(llama_context * ctx, const gpt_params & params) {
                 return;
             }
 
+            // restore the original token in case it was set to BOS
             tokens[batch_start] = token_org;
 
             const auto batch_logits = llama_get_logits(ctx);
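
Note: the comments added above document a save/substitute/restore pattern: the first token of each chunk is temporarily replaced with BOS before evaluation, then restored so the tokenized text is left unchanged for later chunks. Below is a minimal standalone sketch of that pattern for illustration only; eval_batch and BOS_TOKEN are placeholders, not part of the llama.cpp API, and the chunk/batch sizes are made-up values.

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    using token = int;

    static const token BOS_TOKEN = 1;   // assumed BOS id, for illustration only

    // stand-in for the real eval call; returns false on failure
    static bool eval_batch(const token * /*toks*/, int /*n*/) { return true; }

    int main() {
        std::vector<token> tokens(1024, 42);   // dummy tokenized text
        const int n_ctx   = 512;               // tokens per chunk
        const int n_batch = 128;               // tokens per batch within a chunk

        for (size_t start = 0; start + n_ctx <= tokens.size(); start += n_ctx) {
            const int num_batches = (n_ctx + n_batch - 1) / n_batch;

            for (int j = 0; j < num_batches; ++j) {
                const size_t batch_start = start + (size_t) j * n_batch;
                const int    batch_size  = std::min<int>(n_batch, (int) (start + n_ctx - batch_start));

                // save original token and restore it after eval
                const token token_org = tokens[batch_start];

                // add BOS token for the first batch of each chunk
                if (j == 0) {
                    tokens[batch_start] = BOS_TOKEN;
                }

                if (!eval_batch(tokens.data() + batch_start, batch_size)) {
                    fprintf(stderr, "failed to eval\n");
                    return 1;
                }

                // restore the original token in case it was set to BOS
                tokens[batch_start] = token_org;
            }
        }

        printf("done\n");
        return 0;
    }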