From 186ecfd8a4567c277945dca434d26569d973564d Mon Sep 17 00:00:00 2001
From: Slaren <2141330+slaren@users.noreply.github.com>
Date: Fri, 24 Mar 2023 23:46:02 +0100
Subject: [PATCH] Remove printing of prompt and prompt tokenization at startup

---
 main.cpp | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/main.cpp b/main.cpp
index 3f49ad997..12c68ea7a 100644
--- a/main.cpp
+++ b/main.cpp
@@ -276,12 +276,7 @@ int main(int argc, char ** argv) {
     auto llama_token_newline = ::llama_tokenize(ctx, "\n", false);
 
     fprintf(stderr, "\n");
-    fprintf(stderr, "%s: prompt: '%s'\n", __func__, params.prompt.c_str());
     fprintf(stderr, "%s: number of tokens in prompt = %zu\n", __func__, embd_inp.size());
-    for (int i = 0; i < (int) embd_inp.size(); i++) {
-        fprintf(stderr, "%6d -> '%s'\n", embd_inp[i], llama_token_to_str(ctx, embd_inp[i]));
-    }
-    fprintf(stderr, "\n");
     if (params.interactive) {
 #if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
         struct sigaction sigint_action;