From bf60b6a149b3e78435d4bdb2aa29c67a5ffc6af2 Mon Sep 17 00:00:00 2001
From: klosax <131523366+klosax@users.noreply.github.com>
Date: Fri, 28 Jul 2023 11:57:31 +0200
Subject: [PATCH] common.cpp : alter wording

---
 examples/common.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/common.cpp b/examples/common.cpp
index 8175ab66c..fe7308b17 100644
--- a/examples/common.cpp
+++ b/examples/common.cpp
@@ -565,8 +565,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) {
     fprintf(stdout, "                        not recommended: doubles context memory required and no measurable increase in quality\n");
     fprintf(stdout, "  --temp N              temperature (default: %.1f)\n", (double)params.temp);
     fprintf(stdout, "  --perplexity          compute perplexity over each ctx window of the prompt\n");
-    fprintf(stdout, "  --hellaswag           compute HellaSwag score over random examples from datafile supplied with -f\n");
-    fprintf(stdout, "  --hellaswag-tasks N   number of examples to use when computing the HellaSwag score (default: %d)\n", params.hellaswag_tasks);
+    fprintf(stdout, "  --hellaswag           compute HellaSwag score over random tasks from datafile supplied with -f\n");
+    fprintf(stdout, "  --hellaswag-tasks N   number of tasks to use when computing the HellaSwag score (default: %d)\n", params.hellaswag_tasks);
     fprintf(stdout, "  --keep N              number of tokens to keep from the initial prompt (default: %d, -1 = all)\n", params.n_keep);
     fprintf(stdout, "  --chunks N            max number of chunks to process (default: %d, -1 = all)\n", params.n_chunks);
     if (llama_mlock_supported()) {